From ea413978571389ff11adf46087ed7100e9bb8f0d Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 7 Jun 2016 12:58:28 -0300 Subject: [PATCH 001/134] Add PP missing parameter 'delete_on' to api --- sickbeard/server/api/core.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sickbeard/server/api/core.py b/sickbeard/server/api/core.py index ee9a15738c..b82b072f15 100644 --- a/sickbeard/server/api/core.py +++ b/sickbeard/server/api/core.py @@ -1280,6 +1280,7 @@ class CMD_PostProcess(ApiCall): "return_data": {"desc": "Returns the result of the post-process"}, "process_method": {"desc": "How should valid post-processed files be handled"}, "is_priority": {"desc": "Replace the file even if it exists in a higher quality"}, + "delete_on": {"desc": "Delete files and folders"}, "failed": {"desc": "Mark download as failed"}, "type": {"desc": "The type of post-process being requested"}, } @@ -1294,6 +1295,7 @@ def __init__(self, args, kwargs): self.process_method, args = self.check_params(args, kwargs, "process_method", False, False, "string", ["copy", "symlink", "hardlink", "move"]) self.is_priority, args = self.check_params(args, kwargs, "is_priority", False, False, "bool", []) + self.delete_on, args = self.check_params(args, kwargs, "delete_on", False, False, "bool", []) self.failed, args = self.check_params(args, kwargs, "failed", False, False, "bool", []) self.type, args = self.check_params(args, kwargs, "type", "auto", None, "string", ["auto", "manual"]) # super, missing, help @@ -1311,7 +1313,8 @@ def run(self): self.type = "manual" data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace, - is_priority=self.is_priority, failed=self.failed, proc_type=self.type) + is_priority=self.is_priority, delete_on=self.delete_on, failed=self.failed, + proc_type=self.type) if not self.return_data: data = "" From 0f7073fce94eec4db6a573ae087a2f4e0006edff Mon Sep 17 00:00:00 2001 From: Labrys Date: Tue, 7 Jun 2016 20:09:15 -0400 Subject: [PATCH 002/134] Minor rewording --- sickbeard/server/api/core.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sickbeard/server/api/core.py b/sickbeard/server/api/core.py index b82b072f15..f4da89be53 100644 --- a/sickbeard/server/api/core.py +++ b/sickbeard/server/api/core.py @@ -1280,7 +1280,7 @@ class CMD_PostProcess(ApiCall): "return_data": {"desc": "Returns the result of the post-process"}, "process_method": {"desc": "How should valid post-processed files be handled"}, "is_priority": {"desc": "Replace the file even if it exists in a higher quality"}, - "delete_on": {"desc": "Delete files and folders"}, + "delete_files": {"desc": "Delete files and folders like auto processing"}, "failed": {"desc": "Mark download as failed"}, "type": {"desc": "The type of post-process being requested"}, } @@ -1295,7 +1295,7 @@ def __init__(self, args, kwargs): self.process_method, args = self.check_params(args, kwargs, "process_method", False, False, "string", ["copy", "symlink", "hardlink", "move"]) self.is_priority, args = self.check_params(args, kwargs, "is_priority", False, False, "bool", []) - self.delete_on, args = self.check_params(args, kwargs, "delete_on", False, False, "bool", []) + self.delete_files, args = self.check_params(args, kwargs, "delete_files", False, False, "bool", []) self.failed, args = self.check_params(args, kwargs, "failed", False, False, "bool", []) self.type, args = self.check_params(args, kwargs, "type", "auto", None, "string", ["auto", "manual"]) # super, missing, help @@ 
-1313,7 +1313,7 @@ def run(self): self.type = "manual" data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace, - is_priority=self.is_priority, delete_on=self.delete_on, failed=self.failed, + is_priority=self.is_priority, delete_on=self.delete_files, failed=self.failed, proc_type=self.type) if not self.return_data: From 1c9e97a6b5a103eab3f419acacce69e2788c7474 Mon Sep 17 00:00:00 2001 From: p0ps Date: Thu, 9 Jun 2016 19:28:17 +0200 Subject: [PATCH 003/134] Fixed failedDownoads page, when using limit = all (#678) Limit was passed as string --- sickbeard/server/web/manage/handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/server/web/manage/handler.py b/sickbeard/server/web/manage/handler.py index 9d4cbce014..1470e1f1fd 100644 --- a/sickbeard/server/web/manage/handler.py +++ b/sickbeard/server/web/manage/handler.py @@ -696,7 +696,7 @@ def manageTorrents(self): def failedDownloads(self, limit=100, toRemove=None): failed_db_con = db.DBConnection('failed.db') - if limit: + if int(limit): sql_results = failed_db_con.select( b'SELECT * ' b'FROM failed ' From 05ac4f44e087586c01c73d0b1dac0e9294cd38e5 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 12 Jun 2016 17:32:09 +0200 Subject: [PATCH 004/134] Duplicate imports --- sickbeard/server/web/__init__.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/sickbeard/server/web/__init__.py b/sickbeard/server/web/__init__.py index ece587b3af..31c0914462 100644 --- a/sickbeard/server/web/__init__.py +++ b/sickbeard/server/web/__init__.py @@ -36,12 +36,6 @@ HomeChangeLog, HomePostProcess, HomeAddShows, - Home, - HomeIRC, - HomeNews, - HomeChangeLog, - HomePostProcess, - HomeAddShows, ) from sickbeard.server.web.manage import ( Manage, From a8a79e58b8b75010c872a8669ef7dcc78424fef5 Mon Sep 17 00:00:00 2001 From: supergonkas Date: Tue, 14 Jun 2016 15:57:10 +0100 Subject: [PATCH 005/134] Add xspeeds icon (#685) --- gui/slick/images/providers/xspeeds.png | Bin 0 -> 573 bytes gui/slick/images/providers/xspeeds_eu.png | Bin 0 -> 573 bytes 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 gui/slick/images/providers/xspeeds.png create mode 100644 gui/slick/images/providers/xspeeds_eu.png diff --git a/gui/slick/images/providers/xspeeds.png b/gui/slick/images/providers/xspeeds.png new file mode 100644 index 0000000000000000000000000000000000000000..13ca7fdbf8e4e3ba2061ec105d170262b47ba773 GIT binary patch literal 573 zcmV-D0>b@?P)fPgWp6C5K=W~ufe*?b}1M2lUN~IDCg#z;VJc)l7i$y#>KB8K!@;R5w;r{*} zcXxNlX0sxI43yv(sZ`g{UteFW!{H!G1Ux-Gfr3Dt zP6rhIexKW1uh;ne{KRlLWDN!bjK^b4CKD0B?RC3d2(ef!aCLP>0IvG>_C~{_xp3a& z@sLCSwawaWHst56ghC-)US47}8sVQ{V6)kT-EJ2FOnrWSh7i-~6n?)Sfj|JkU=Wo` z1&v07jC`HtatW)|Dgt=Q`~4mtA0OgXdc7V)Tdfv#!~?F?Y6Rd}8I8sx;Pv&DO%D$b z+}iv5JN_5D-HunT)9FM2SAThVVYAt6hS%$*0Sclk(*Ak(7Z(>$DwR;FR7Zfz<-+;- zIhzdz15Qs*advhFyfo4}PO>U2|>&w3AJJ00000 LNkvXXu0mjfw1NQG literal 0 HcmV?d00001 diff --git a/gui/slick/images/providers/xspeeds_eu.png b/gui/slick/images/providers/xspeeds_eu.png new file mode 100644 index 0000000000000000000000000000000000000000..13ca7fdbf8e4e3ba2061ec105d170262b47ba773 GIT binary patch literal 573 zcmV-D0>b@?P)fPgWp6C5K=W~ufe*?b}1M2lUN~IDCg#z;VJc)l7i$y#>KB8K!@;R5w;r{*} zcXxNlX0sxI43yv(sZ`g{UteFW!{H!G1Ux-Gfr3Dt zP6rhIexKW1uh;ne{KRlLWDN!bjK^b4CKD0B?RC3d2(ef!aCLP>0IvG>_C~{_xp3a& z@sLCSwawaWHst56ghC-)US47}8sVQ{V6)kT-EJ2FOnrWSh7i-~6n?)Sfj|JkU=Wo` z1&v07jC`HtatW)|Dgt=Q`~4mtA0OgXdc7V)Tdfv#!~?F?Y6Rd}8I8sx;Pv&DO%D$b 
z+}iv5JN_5D-HunT)9FM2SAThVVYAt6hS%$*0Sclk(*Ak(7Z(>$DwR;FR7Zfz<-+;- zIhzdz15Qs*advhFyfo4}PO>U2|>&w3AJJ00000 LNkvXXu0mjfw1NQG literal 0 HcmV?d00001 From 5c74c48f0aaff4d6baccc853fc3844ee435b0977 Mon Sep 17 00:00:00 2001 From: Dario Date: Tue, 14 Jun 2016 16:59:23 +0200 Subject: [PATCH 006/134] Update subliminal to 2.0.3 (master), update Itasa (#682) --- lib/subliminal/__init__.py | 2 +- lib/subliminal/cli.py | 6 ++-- lib/subliminal/providers/itasa.py | 43 +++++++++++++++++--------- lib/subliminal/providers/legendastv.py | 2 +- 4 files changed, 35 insertions(+), 18 deletions(-) diff --git a/lib/subliminal/__init__.py b/lib/subliminal/__init__.py index 73b137e987..187e618b31 100644 --- a/lib/subliminal/__init__.py +++ b/lib/subliminal/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- __title__ = 'subliminal' -__version__ = '2.1.0.dev' +__version__ = '2.0.3' __short_version__ = '.'.join(__version__.split('.')[:2]) __author__ = 'Antoine Bertin' __license__ = 'MIT' diff --git a/lib/subliminal/cli.py b/lib/subliminal/cli.py index f6c5425a84..e2a78cf153 100644 --- a/lib/subliminal/cli.py +++ b/lib/subliminal/cli.py @@ -6,6 +6,7 @@ from __future__ import division from collections import defaultdict from datetime import timedelta +import glob import json import logging import os @@ -15,7 +16,7 @@ from babelfish import Error as BabelfishError, Language import click from dogpile.cache.backends.file import AbstractFileLock -from dogpile.core import ReadWriteMutex +from dogpile.util.readwrite_lock import ReadWriteMutex from six.moves import configparser from subliminal import (AsyncProviderPool, Episode, Movie, Video, __version__, check_video, compute_score, get_scores, @@ -266,7 +267,8 @@ def subliminal(ctx, addic7ed, itasa, legendastv, opensubtitles, subscenter, cach def cache(ctx, clear_subliminal): """Cache management.""" if clear_subliminal: - os.remove(os.path.join(ctx.parent.params['cache_dir'], cache_file)) + for file in glob.glob(os.path.join(ctx.parent.params['cache_dir'], cache_file) + '*'): + os.remove(file) click.echo('Subliminal\'s cache cleared.') else: click.echo('Nothing done.') diff --git a/lib/subliminal/providers/itasa.py b/lib/subliminal/providers/itasa.py index 3c01203086..f4478113ff 100644 --- a/lib/subliminal/providers/itasa.py +++ b/lib/subliminal/providers/itasa.py @@ -268,8 +268,15 @@ def _get_season_subtitles(self, show_id, season, sub_format): root = etree.fromstring(r.content) if int(root.find('data/count').text) == 0: - logger.warning('Subtitles for season not found') - return [] + logger.warning('Subtitles for season not found, try with rip suffix') + + params['version'] = sub_format + 'rip' + r = self.session.get(self.server_url + 'subtitles/search', params=params, timeout=30) + r.raise_for_status() + root = etree.fromstring(r.content) + if int(root.find('data/count').text) == 0: + logger.warning('Subtitles for season not found') + return [] subs = [] # Looking for subtitles in first page @@ -360,18 +367,26 @@ def query(self, series, season, episode, video_format, resolution, country=None) root = etree.fromstring(r.content) if int(root.find('data/count').text) == 0: - logger.warning('Subtitles not found') - # If no subtitle are found for single episode try to download all season zip - subs = self._get_season_subtitles(show_id, season, sub_format) - if subs: - for subtitle in subs: - subtitle.format = video_format - subtitle.year = year - subtitle.tvdb_id = tvdb_id - - return subs - else: - return [] + logger.warning('Subtitles not found, try with rip suffix') + + 
params['version'] = sub_format + 'rip' + r = self.session.get(self.server_url + 'subtitles/search', params=params, timeout=30) + r.raise_for_status() + root = etree.fromstring(r.content) + if int(root.find('data/count').text) == 0: + logger.warning('Subtitles not found, go season mode') + + # If no subtitle are found for single episode try to download all season zip + subs = self._get_season_subtitles(show_id, season, sub_format) + if subs: + for subtitle in subs: + subtitle.format = video_format + subtitle.year = year + subtitle.tvdb_id = tvdb_id + + return subs + else: + return [] # Looking for subtitles in first page for subtitle in root.findall('data/subtitles/subtitle'): diff --git a/lib/subliminal/providers/legendastv.py b/lib/subliminal/providers/legendastv.py index 7c3cc74d13..cdd16aca25 100644 --- a/lib/subliminal/providers/legendastv.py +++ b/lib/subliminal/providers/legendastv.py @@ -303,7 +303,7 @@ def get_archives(self, title_id, language_code): archives.append(archive) # stop on last page - if soup.find('a', attrs={'class': 'load_more'}, text='carregar mais') is None: + if soup.find('a', attrs={'class': 'load_more'}, string='carregar mais') is None: break # increment page count From 5878701a31da668f4ac370cf5b45b5931a098280 Mon Sep 17 00:00:00 2001 From: Labrys of Knossos Date: Wed, 15 Jun 2016 04:33:20 -0400 Subject: [PATCH 007/134] Unicode mass edit (#688) * Fix unicode error when changing root directories with mass edit * Fix typo --- sickbeard/server/web/home/add_shows.py | 4 ++-- sickbeard/server/web/home/handler.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/sickbeard/server/web/home/add_shows.py b/sickbeard/server/web/home/add_shows.py index 6bbe2ea9ec..d651b7a823 100644 --- a/sickbeard/server/web/home/add_shows.py +++ b/sickbeard/server/web/home/add_shows.py @@ -149,7 +149,7 @@ def massAddTable(self, rootDir=None): dir_results = main_db_con.select( b'SELECT indexer_id ' b'FROM tv_shows ' - b'WHERE location = ? LIMIT 1', + b'WHERE location = ? LIMIT 1', [cur_path] ) @@ -534,7 +534,7 @@ def finishAddShow(): series_pieces = whichSeries.split('|') if (whichSeries and rootDir) or (whichSeries and fullShowPath and len(series_pieces) > 1): if len(series_pieces) < 6: - logger.log(u'Unable to add show due to show selection. Not anough arguments: %s' % (repr(series_pieces)), + logger.log(u'Unable to add show due to show selection. Not enough arguments: %s' % (repr(series_pieces)), logger.ERROR) ui.notifications.error('Unknown error. 
Unable to add show due to problem with show selection.') return self.redirect('/addShows/existingShows/') diff --git a/sickbeard/server/web/home/handler.py b/sickbeard/server/web/home/handler.py index a22ab3d019..1812c19628 100644 --- a/sickbeard/server/web/home/handler.py +++ b/sickbeard/server/web/home/handler.py @@ -1369,7 +1369,6 @@ def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[], show_obj.rls_ignore_words = rls_ignore_words.strip() show_obj.rls_require_words = rls_require_words.strip() - location = location.decode('UTF-8') # if we change location clear the db of episodes, change it, write to db, and rescan old_location = ek(os.path.normpath, show_obj._location) new_location = ek(os.path.normpath, location) From 179af4bed7725ae2c558db42bc019398bfebc3ec Mon Sep 17 00:00:00 2001 From: Labrys of Knossos Date: Fri, 17 Jun 2016 11:22:18 -0400 Subject: [PATCH 008/134] Fix notifications (#694) * Fix wording * Fix #693 - UnicodeError in notifications --- sickbeard/search_queue.py | 4 ++-- sickbeard/server/web/core/base.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py index 533bd0c9b5..e4e6f7c810 100644 --- a/sickbeard/search_queue.py +++ b/sickbeard/search_queue.py @@ -331,10 +331,10 @@ def run(self): self.results = search_result self.success = True if self.manual_search_type == 'season': - ui.notifications.message("We have found season pack results for {0}".format(self.show.name), + ui.notifications.message("We have found season packs for {0}".format(self.show.name), "These should become visible in the manual select page.") else: - ui.notifications.message("We have found single results for {0}".format(self.segment[0].prettyName()), + ui.notifications.message("We have found results for {0}".format(self.segment[0].prettyName()), "These should become visible in the manual select page.") else: ui.notifications.message('No results were found') diff --git a/sickbeard/server/web/core/base.py b/sickbeard/server/web/core/base.py index e997bb2cf4..20c4d7a477 100644 --- a/sickbeard/server/web/core/base.py +++ b/sickbeard/server/web/core/base.py @@ -472,9 +472,9 @@ def get_messages(self): cur_notification_num = 1 for cur_notification in ui.notifications.get_notifications(self.request.remote_ip): messages['notification-{number}'.format(number=cur_notification_num)] = { - 'title': cur_notification.title, - 'message': cur_notification.message, - 'type': cur_notification.type, + 'title': '{0}'.format(cur_notification.title), + 'message': '{0}'.format(cur_notification.message), + 'type': '{0}'.format(cur_notification.type), } cur_notification_num += 1 From 9882225e07201c181176eeff450e9167246487e9 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Tue, 24 May 2016 19:30:54 +0200 Subject: [PATCH 009/134] Just some code cleaning * changed % to format * cut long lines --- sickbeard/tvcache.py | 154 ++++++++++++++++++++++--------------------- 1 file changed, 80 insertions(+), 74 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 06b543f12a..1456bdb622 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -32,68 +32,70 @@ class CacheDBConnection(db.DBConnection): - def __init__(self, providerName): + def __init__(self, provider_id): db.DBConnection.__init__(self, 'cache.db') # Create the table if it's not already there try: - if not self.hasTable(providerName): - logger.log(u"Creating cache table for provider {}".format(providerName), logger.DEBUG) + if not 
self.hasTable(provider_id): + logger.log(u'Creating cache table for provider {0}'.format(provider_id), logger.DEBUG) self.action( - "CREATE TABLE [" + providerName + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality NUMERIC, release_group TEXT)") + 'CREATE TABLE [{0}] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC,' + 'url TEXT, time NUMERIC, quality NUMERIC, release_group TEXT)'.format(provider_id)) else: - sql_results = self.select("SELECT url, COUNT(url) AS count FROM [" + providerName + "] GROUP BY url HAVING count > 1") + sql_results = self.select('SELECT url, COUNT(url) AS count FROM [{0}] ' + 'GROUP BY url HAVING count > 1'.format(provider_id)) for cur_dupe in sql_results: - self.action("DELETE FROM [" + providerName + "] WHERE url = ?", [cur_dupe["url"]]) + self.action('DELETE FROM [{0}] WHERE url = ?'.format(provider_id), [cur_dupe['url']]) # remove wrong old index - self.action("DROP INDEX IF EXISTS idx_url") + self.action('DROP INDEX IF EXISTS idx_url') # add unique index to prevent further dupes from happening if one does not exist - logger.log(u"Creating UNIQUE URL index for {}".format(providerName), logger.DEBUG) - self.action("CREATE UNIQUE INDEX IF NOT EXISTS idx_url_" + providerName + " ON [" + providerName + "] (url)") + logger.log(u'Creating UNIQUE URL index for {0}'.format(provider_id), logger.DEBUG) + self.action('CREATE UNIQUE INDEX IF NOT EXISTS idx_url_{0} ON [{1}] (url)'.format(provider_id, provider_id)) # add release_group column to table if missing - if not self.hasColumn(providerName, 'release_group'): - self.addColumn(providerName, 'release_group', "TEXT", "") + if not self.hasColumn(provider_id, 'release_group'): + self.addColumn(provider_id, 'release_group', 'TEXT', '') # add version column to table if missing - if not self.hasColumn(providerName, 'version'): - self.addColumn(providerName, 'version', "NUMERIC", "-1") + if not self.hasColumn(provider_id, 'version'): + self.addColumn(provider_id, 'version', 'NUMERIC', '-1') # add seeders column to table if missing - if not self.hasColumn(providerName, 'seeders'): - self.addColumn(providerName, 'seeders', "NUMERIC", "-1") + if not self.hasColumn(provider_id, 'seeders'): + self.addColumn(provider_id, 'seeders', 'NUMERIC', '-1') # add leechers column to table if missing - if not self.hasColumn(providerName, 'leechers'): - self.addColumn(providerName, 'leechers', "NUMERIC", "-1") + if not self.hasColumn(provider_id, 'leechers'): + self.addColumn(provider_id, 'leechers', 'NUMERIC', '-1') # add size column to table if missing - if not self.hasColumn(providerName, 'size'): - self.addColumn(providerName, 'size', "NUMERIC", "-1") + if not self.hasColumn(provider_id, 'size'): + self.addColumn(provider_id, 'size', 'NUMERIC', '-1') # add pubdate column to table if missing - if not self.hasColumn(providerName, 'pubdate'): - self.addColumn(providerName, 'pubdate', "NUMERIC", "") + if not self.hasColumn(provider_id, 'pubdate'): + self.addColumn(provider_id, 'pubdate', 'NUMERIC', '') # add hash column to table if missing - if not self.hasColumn(providerName, 'hash'): - self.addColumn(providerName, 'hash', "NUMERIC", "") + if not self.hasColumn(provider_id, 'hash'): + self.addColumn(provider_id, 'hash', 'NUMERIC', '') except Exception as e: - if str(e) != "table [" + providerName + "] already exists": + if str(e) != 'table [{0}] already exists'.format(provider_id): raise # Create the table if it's not already there try: if not self.hasTable('lastUpdate'): - 
self.action("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)") + self.action('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)') except Exception as e: - logger.log(u"Error while searching " + self.provider.name + ", skipping: " + repr(e), logger.DEBUG) + logger.log(u'Error while searching {0}, skipping: {1!r}'.format(self.provider.name, e), logger.DEBUG) logger.log(traceback.format_exc(), logger.DEBUG) - if str(e) != "table lastUpdate already exists": + if str(e) != 'table lastUpdate already exists': raise @@ -132,7 +134,7 @@ def trim_cache(self, days=None): retention_period = now - (days * 86400) logger.log(u'Removing cache entries older than {x} days from {provider}'.format (x=days, provider=self.providerID)) - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.action( b'DELETE FROM [{provider}] ' b'WHERE time < ? '.format(provider=self.providerID), @@ -196,26 +198,27 @@ def updateCache(self): cache_db_con.mass_action(cl) except AuthException as e: - logger.log(u"Authentication error: " + ex(e), logger.ERROR) + logger.log(u'Authentication error: {0!r}'.format(e), logger.ERROR) except Exception as e: - logger.log(u"Error while searching " + self.provider.name + ", skipping: " + repr(e), logger.DEBUG) + logger.log(u'Error while searching {0}, skipping: {1!r}'.format(self.provider.name, e), logger.DEBUG) def update_cache_manual_search(self, manual_data=None): try: cl = [] for item in manual_data: - logger.log(u"Adding to cache item found in manual search: {}".format(item.name), logger.DEBUG) + logger.log(u'Adding to cache item found in manual search: {0}'.format(item.name), logger.DEBUG) ci = self._addCacheEntry(item.name, item.url, item.seeders, item.leechers, item.size, item.pubdate, item.hash) if ci is not None: cl.append(ci) except Exception as e: - logger.log(u"Error while adding to cache item found in manual seach for provider " + self.provider.name + ", skipping: " + repr(e), logger.WARNING) + logger.log(u'Error while adding to cache item found in manual seach for provider {0}, skipping: {1!r}'.format + (self.provider.name, e), logger.WARNING) results = [] cache_db_con = self._getDB() if cl: - logger.log("Mass updating cache table with manual results for provider: {}".format(self.provider.name), logger.DEBUG) + logger.log(u'Mass updating cache table with manual results for provider: {0}'.format(self.provider.name), logger.DEBUG) results = cache_db_con.mass_action(cl) return any(results) @@ -227,7 +230,7 @@ def getRSSFeed(self, url, params=None): @staticmethod def _translateTitle(title): - return u'' + title.replace(' ', '.') + return u'{0}'.format(title.replace(' ', '.')) @staticmethod def _translateLinkURL(url): @@ -250,18 +253,17 @@ def _parseItem(self, item): return self._addCacheEntry(title, url, seeders, leechers, size, pubdate, hash) else: - logger.log( - u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable", - logger.DEBUG) + logger.log(u'The data returned from the {0} feed is incomplete, this result is unusable'.format + (self.provider.name), logger.DEBUG) return False def _getLastUpdate(self): cache_db_con = self._getDB() - sql_results = cache_db_con.select("SELECT time FROM lastUpdate WHERE provider = ?", [self.providerID]) + sql_results = cache_db_con.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID]) if sql_results: - lastTime = int(sql_results[0]["time"]) + lastTime = int(sql_results[0]['time']) if lastTime > int(time.mktime(datetime.datetime.today().timetuple())): 
lastTime = 0 else: @@ -271,10 +273,10 @@ def _getLastUpdate(self): def _getLastSearch(self): cache_db_con = self._getDB() - sql_results = cache_db_con.select("SELECT time FROM lastSearch WHERE provider = ?", [self.providerID]) + sql_results = cache_db_con.select('SELECT time FROM lastSearch WHERE provider = ?', [self.providerID]) if sql_results: - lastTime = int(sql_results[0]["time"]) + lastTime = int(sql_results[0]['time']) if lastTime > int(time.mktime(datetime.datetime.today().timetuple())): lastTime = 0 else: @@ -288,7 +290,7 @@ def setLastUpdate(self, toDate=None): cache_db_con = self._getDB() cache_db_con.upsert( - "lastUpdate", + 'lastUpdate', {'time': int(time.mktime(toDate.timetuple()))}, {'provider': self.providerID} ) @@ -299,7 +301,7 @@ def setLastSearch(self, toDate=None): cache_db_con = self._getDB() cache_db_con.upsert( - "lastSearch", + 'lastSearch', {'time': int(time.mktime(toDate.timetuple()))}, {'provider': self.providerID} ) @@ -310,7 +312,8 @@ def setLastSearch(self, toDate=None): def shouldUpdate(self): # if we've updated recently then skip the update if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime): - logger.log(u"Last update was too soon, using old cache: " + str(self.lastUpdate) + ". Updated less then " + str(self.minTime) + " minutes ago", logger.DEBUG) + logger.log(u'Last update was too soon, using old cache: {0}. Updated less then {1} minutes ago.'.format + (self.lastUpdate, self.minTime), logger.DEBUG) return False return True @@ -327,7 +330,7 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): try: parse_result = NameParser().parse(name) except (InvalidNameException, InvalidShowException) as error: - logger.log(u"{}".format(error), logger.DEBUG) + logger.log(u'{0}'.format(error), logger.DEBUG) return None if not parse_result or not parse_result.series_name: @@ -339,7 +342,7 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): if season is not None and episodes is not None: # store episodes as a seperated string - episodeText = "|" + "|".join({str(episode) for episode in episodes if episode}) + "|" + episodeText = '|{0}|'.format('|'.join({str(episode) for episode in episodes if episode})) # get the current timestamp curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) @@ -355,11 +358,13 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): # get version version = parse_result.version - logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG) + logger.log(u'Added RSS item: [{0}] to cache: [{1}]'.format(name, self.providerID), logger.DEBUG) return [ - "INSERT OR REPLACE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version, seeders, leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)", - [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version, seeders, leechers, size, pubdate, hash]] + 'INSERT OR REPLACE INTO [{0}] (name, season, episodes, indexerid, url, time, quality, release_group, ' + 'version, seeders, leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(self.providerID), + [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, + release_group, version, seeders, leechers, size, pubdate, hash]] def searchCache(self, episode, forced_search=False, downCurQuality=False): neededEps = 
self.findNeededEpisodes(episode, forced_search, downCurQuality) @@ -367,10 +372,10 @@ def searchCache(self, episode, forced_search=False, downCurQuality=False): def listPropers(self, date=None): cache_db_con = self._getDB() - sql = "SELECT * FROM [" + self.providerID + "] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'" + sql = "SELECT * FROM [{0}] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'".format(self.providerID) if date is not None: - sql += " AND time >= " + str(int(time.mktime(date.timetuple()))) + sql += ' AND time >= {0}'.format(int(time.mktime(date.timetuple()))) propers_results = cache_db_con.select(sql) return [x for x in propers_results if x['indexerid']] @@ -381,17 +386,18 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) cache_db_con = self._getDB() if not episode: - sql_results = cache_db_con.select("SELECT * FROM [" + self.providerID + "]") + sql_results = cache_db_con.select('SELECT * FROM [{0}]'.format(self.providerID)) elif not isinstance(episode, list): sql_results = cache_db_con.select( - "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?", - [episode.show.indexerid, episode.season, "%|" + str(episode.episode) + "|%"]) + 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ?'.format(self.providerID), + [episode.show.indexerid, episode.season, '%|{0}|%'.format(episode.episode)]) else: for epObj in episode: cl.append([ - "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ? AND quality IN (" + ",".join( - [str(x) for x in epObj.wantedQuality]) + ")", - [epObj.show.indexerid, epObj.season, "%|" + str(epObj.episode) + "|%"]]) + 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ? 
AND quality IN ({1})'.format + (self.providerID, ','.join( + [str(x) for x in epObj.wantedQuality])), + [epObj.show.indexerid, epObj.season, '%|{0}|%'.format(epObj.episode)]]) sql_results = cache_db_con.mass_action(cl, fetchall=True) sql_results = list(itertools.chain(*sql_results)) @@ -399,55 +405,55 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) # for each cache entry for curResult in sql_results: # ignored/required words, and non-tv junk - if not show_name_helpers.filterBadReleases(curResult["name"]): + if not show_name_helpers.filterBadReleases(curResult['name']): continue # get the show object, or if it's not one of our shows then ignore it - showObj = Show.find(sickbeard.showList, int(curResult["indexerid"])) + showObj = Show.find(sickbeard.showList, int(curResult['indexerid'])) if not showObj: continue # skip if provider is anime only and show is not anime if self.provider.anime_only and not showObj.is_anime: - logger.log(u"" + str(showObj.name) + " is not an anime, skiping", logger.DEBUG) + logger.log(u'{0} is not an anime, skiping'.format(showObj.name), logger.DEBUG) continue # get season and ep data (ignoring multi-eps for now) - curSeason = int(curResult["season"]) + curSeason = int(curResult['season']) if curSeason == -1: continue - curEp = curResult["episodes"].split("|")[1] + curEp = curResult['episodes'].split('|')[1] if not curEp: continue curEp = int(curEp) - curQuality = int(curResult["quality"]) - curReleaseGroup = curResult["release_group"] - curVersion = curResult["version"] + curQuality = int(curResult['quality']) + curReleaseGroup = curResult['release_group'] + curVersion = curResult['version'] # if the show says we want that episode then add it to the list if not showObj.wantEpisode(curSeason, curEp, curQuality, forced_search, downCurQuality): - logger.log(u"Ignoring " + curResult["name"], logger.DEBUG) + logger.log(u'Ignoring {0}'.format(curResult['name']), logger.DEBUG) continue epObj = showObj.getEpisode(curSeason, curEp) # build a result object - title = curResult["name"] - url = curResult["url"] + title = curResult['name'] + url = curResult['url'] - logger.log(u"Found result " + title + " at " + url) + logger.log(u'Found result {0} at {1}'.format(title, url)) result = self.provider.get_result([epObj]) result.show = showObj result.url = url - result.seeders = curResult["seeders"] - result.leechers = curResult["leechers"] - result.size = curResult["size"] - result.pubdate = curResult["pubdate"] - result.hash = curResult["hash"] + result.seeders = curResult['seeders'] + result.leechers = curResult['leechers'] + result.size = curResult['size'] + result.pubdate = curResult['pubdate'] + result.hash = curResult['hash'] result.name = title result.quality = curQuality result.release_group = curReleaseGroup From e06050399af3d5a4e75497bf281b5f1230162a6d Mon Sep 17 00:00:00 2001 From: P0psicles Date: Tue, 24 May 2016 22:03:15 +0200 Subject: [PATCH 010/134] Some cleaning, and implemented a get_last_cached_items() method, for some testing. 
--- sickbeard/tvcache.py | 53 ++++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 27 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 1456bdb622..ca812cc836 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -102,17 +102,17 @@ def __init__(self, provider_id): class TVCache(object): def __init__(self, provider, **kwargs): self.provider = provider - self.providerID = self.provider.get_id() - self.providerDB = None + self.provider_id = self.provider.get_id() + self.provider_db = None self.minTime = kwargs.pop(u'min_time', 10) self.search_params = kwargs.pop(u'search_params', dict(RSS=[''])) - def _getDB(self): + def _get_db(self): # init provider database if not done already - if not self.providerDB: - self.providerDB = CacheDBConnection(self.providerID) + if not self.provider_db: + self.provider_db = CacheDBConnection(self.provider_id) - return self.providerDB + return self.provider_db def _clearCache(self): """ @@ -193,7 +193,7 @@ def updateCache(self): if ci is not None: cl.append(ci) - cache_db_con = self._getDB() + cache_db_con = self._get_db() if cl: cache_db_con.mass_action(cl) @@ -216,7 +216,7 @@ def update_cache_manual_search(self, manual_data=None): (self.provider.name, e), logger.WARNING) results = [] - cache_db_con = self._getDB() + cache_db_con = self._get_db() if cl: logger.log(u'Mass updating cache table with manual results for provider: {0}'.format(self.provider.name), logger.DEBUG) results = cache_db_con.mass_action(cl) @@ -241,7 +241,6 @@ def _parseItem(self, item): seeders, leechers = self._get_result_info(item) size = self._get_size(item) pubdate = self._get_pubdate(item) - hash = self._get_hash(item) self._checkItemAuth(title, url) @@ -249,8 +248,8 @@ def _parseItem(self, item): title = self._translateTitle(title) url = self._translateLinkURL(url) - # logger.log(u"Attempting to add item to cache: " + title, logger.DEBUG) - return self._addCacheEntry(title, url, seeders, leechers, size, pubdate, hash) + # Placed the self._get_hash(item) inline, because hash is a buildin. Could cause issues. 
+ return self._addCacheEntry(title, url, seeders, leechers, size, pubdate, self._get_hash(item)) else: logger.log(u'The data returned from the {0} feed is incomplete, this result is unusable'.format @@ -259,8 +258,8 @@ def _parseItem(self, item): return False def _getLastUpdate(self): - cache_db_con = self._getDB() - sql_results = cache_db_con.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID]) + cache_db_con = self._get_db() + sql_results = cache_db_con.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.provider_id]) if sql_results: lastTime = int(sql_results[0]['time']) @@ -272,8 +271,8 @@ def _getLastUpdate(self): return datetime.datetime.fromtimestamp(lastTime) def _getLastSearch(self): - cache_db_con = self._getDB() - sql_results = cache_db_con.select('SELECT time FROM lastSearch WHERE provider = ?', [self.providerID]) + cache_db_con = self._get_db() + sql_results = cache_db_con.select('SELECT time FROM lastSearch WHERE provider = ?', [self.provider_id]) if sql_results: lastTime = int(sql_results[0]['time']) @@ -288,22 +287,22 @@ def setLastUpdate(self, toDate=None): if not toDate: toDate = datetime.datetime.today() - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.upsert( 'lastUpdate', {'time': int(time.mktime(toDate.timetuple()))}, - {'provider': self.providerID} + {'provider': self.provider_id} ) def setLastSearch(self, toDate=None): if not toDate: toDate = datetime.datetime.today() - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.upsert( 'lastSearch', {'time': int(time.mktime(toDate.timetuple()))}, - {'provider': self.providerID} + {'provider': self.provider_id} ) lastUpdate = property(_getLastUpdate) @@ -358,11 +357,11 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): # get version version = parse_result.version - logger.log(u'Added RSS item: [{0}] to cache: [{1}]'.format(name, self.providerID), logger.DEBUG) + logger.log(u'Added RSS item: [{0}] to cache: [{1}]'.format(name, self.provider_id), logger.DEBUG) return [ 'INSERT OR REPLACE INTO [{0}] (name, season, episodes, indexerid, url, time, quality, release_group, ' - 'version, seeders, leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(self.providerID), + 'version, seeders, leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(self.provider_id), [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version, seeders, leechers, size, pubdate, hash]] @@ -371,8 +370,8 @@ def searchCache(self, episode, forced_search=False, downCurQuality=False): return neededEps[episode] if episode in neededEps else [] def listPropers(self, date=None): - cache_db_con = self._getDB() - sql = "SELECT * FROM [{0}] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'".format(self.providerID) + cache_db_con = self._get_db() + sql = "SELECT * FROM [{0}] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'".format(self.provider_id) if date is not None: sql += ' AND time >= {0}'.format(int(time.mktime(date.timetuple()))) @@ -384,18 +383,18 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) neededEps = {} cl = [] - cache_db_con = self._getDB() + cache_db_con = self._get_db() if not episode: - sql_results = cache_db_con.select('SELECT * FROM [{0}]'.format(self.providerID)) + sql_results = cache_db_con.select('SELECT * FROM [{0}]'.format(self.provider_id)) elif not isinstance(episode, list): sql_results = 
cache_db_con.select( - 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ?'.format(self.providerID), + 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ?'.format(self.provider_id), [episode.show.indexerid, episode.season, '%|{0}|%'.format(episode.episode)]) else: for epObj in episode: cl.append([ 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ? AND quality IN ({1})'.format - (self.providerID, ','.join( + (self.provider_id, ','.join( [str(x) for x in epObj.wantedQuality])), [epObj.show.indexerid, epObj.season, '%|{0}|%'.format(epObj.episode)]]) From a76e364273383798471540ab93a45db45bdaa5cc Mon Sep 17 00:00:00 2001 From: P0psicles Date: Wed, 25 May 2016 15:30:52 +0200 Subject: [PATCH 011/134] First version of the parse reducing. Used a new table, that's used for all providers, to keep track of 5 newest releases. Then all results are matched to this table. Making it universal for all providers, and reducing the amounts of parses. * Made sure that cache items older then 7 days are also deleted for this table. * In the process of cleaning the cache after 7 days, noticed the other one never works. Maybe it's intentional, but then it should be disabled. --- sickbeard/databases/cache_db.py | 10 ++++++ sickbeard/tvcache.py | 56 ++++++++++++++++++++++++++++++--- 2 files changed, 62 insertions(+), 4 deletions(-) diff --git a/sickbeard/databases/cache_db.py b/sickbeard/databases/cache_db.py index 7e5549bb83..7a9525ae03 100644 --- a/sickbeard/databases/cache_db.py +++ b/sickbeard/databases/cache_db.py @@ -124,3 +124,13 @@ def execute(self): self.connection.action("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);") self.connection.action("INSERT INTO scene_names SELECT * FROM tmp_scene_names;") self.connection.action("DROP TABLE tmp_scene_names;") + + +class AddProviderRssCache(ConvertSceneNamesToIndexerScheme): # pylint:disable=too-many-ancestors + """A provider cache table thats used to keep track of the last parsed search results""" + def test(self): + return self.hasTable("provider_rss_cache") + + def execute(self): + self.connection.action( + "CREATE TABLE provider_rss_cache (rss_cache_id INTEGER PRIMARY KEY, name TEXT, url TEXT, time NUMERIC DEFAULT 0, provider_id TEXT NOT NULL);") diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index ca812cc836..70b3388168 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -114,6 +114,12 @@ def _get_db(self): return self.provider_db + def _clearProviderRssCache(self): + cache_db_con = self._get_db() + today = int(time.mktime(datetime.datetime.today().timetuple())) + # Keep item in cache for 7 days + cache_db_con.action('DELETE FROM provider_rss_cache WHERE provider_id = ? AND time < ? 
', [self.provider_id, today - 7 * 86400]) # 86400 POSIX day (exact value) + def _clearCache(self): """ Performs requalar cache cleaning as required @@ -122,6 +128,7 @@ def _clearCache(self): if sickbeard.CACHE_TRIMMING: # trim items older than MAX_CACHE_AGE days self.trim_cache(days=sickbeard.MAX_CACHE_AGE) + self._clearProviderRssCache() def trim_cache(self, days=None): """ @@ -187,16 +194,35 @@ def updateCache(self): # set updated self.setLastUpdate() + # get last 5 provider_rss_cache results + recent_results = self.get_last_cached_items(5) + found_recent_results = 0 + stop_at = 1 + cl = [] - for item in data['entries'] or []: - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) + index = 0 + for index, item in enumerate(data['entries'] or []): + if recent_results and item.get('link').strip() in [cache_item['url'].strip() for cache_item in recent_results]: + found_recent_results += 1 + + if found_recent_results >= stop_at: + logger.log(u'Hit the old cached items, not parsing any more for: {0}'.format + (self.provider_id), logger.ERROR) + break + try: + ci = self._parseItem(item) + if ci is not None: + cl.append(ci) + except UnicodeDecodeError, e: + continue cache_db_con = self._get_db() if cl: cache_db_con.mass_action(cl) + # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 + self._update_provider_rss_cache(data['entries'][0:min(index, 5)]) + except AuthException as e: logger.log(u'Authentication error: {0!r}'.format(e), logger.ERROR) except Exception as e: @@ -223,6 +249,28 @@ def update_cache_manual_search(self, manual_data=None): return any(results) + def _update_provider_rss_cache(self, items): + """Updates the table provider_rss_cache with a limited amount of the latest search result url's""" + + cache_db_con = self._get_db() + new_items = [] + sql_results = [] + + for item in items: + # get the current timestamp + cur_time = int(time.mktime(datetime.datetime.today().timetuple())) + + logger.log(u"Added provider_rss_cache item: {0}".format(item.get('link'), self.provider_id), logger.DEBUG) + + new_items.append(["INSERT OR REPLACE INTO provider_rss_cache (name, url, time, provider_id) VALUES (?,?,?,?)", + [item.get('title'), item.get('link'), cur_time, self.provider_id]]) + + if new_items: + logger.log(u'Mass updating provider_rss_cache table with results for provider: {0}'.format(self.provider.name), logger.DEBUG) + sql_results = cache_db_con.mass_action(new_items) + + return any(sql_results) + def getRSSFeed(self, url, params=None): if self.provider.login(): return getFeed(url, params=params, request_hook=self.provider.get_url) From 1effe0403774e5cce0ed2d5d1a495ae8a88d6dd2 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Wed, 25 May 2016 15:33:21 +0200 Subject: [PATCH 012/134] Missed the camelCase --- sickbeard/tvcache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 70b3388168..5dd5f35bf9 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -114,7 +114,7 @@ def _get_db(self): return self.provider_db - def _clearProviderRssCache(self): + def _clear_provider_rss_cache(self): cache_db_con = self._get_db() today = int(time.mktime(datetime.datetime.today().timetuple())) # Keep item in cache for 7 days @@ -128,7 +128,7 @@ def _clearCache(self): if sickbeard.CACHE_TRIMMING: # trim items older than MAX_CACHE_AGE days self.trim_cache(days=sickbeard.MAX_CACHE_AGE) - self._clearProviderRssCache() + self._clear_provider_rss_cache() def 
trim_cache(self, days=None): """ From 43184ccf9ac73021b28038fee68b8b34b4ebac62 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Thu, 26 May 2016 23:13:16 +0200 Subject: [PATCH 013/134] Removed the sqlite solution, and replaced with a dict[provider][{}]. Still need to put in the logic to only remember last 5 items per provider key. --- sickbeard/__init__.py | 5 +++- sickbeard/databases/cache_db.py | 10 -------- sickbeard/tvcache.py | 35 +++------------------------ sickrage/providers/GenericProvider.py | 11 +++++++++ 4 files changed, 18 insertions(+), 43 deletions(-) diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 809da9f5eb..65f1326d99 100644 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -606,6 +606,8 @@ PRIVACY_LEVEL = 'normal' +provider_recent_results = {} + def get_backlog_cycle_time(): cycletime = DAILYSEARCH_FREQUENCY * 2 + 7 @@ -659,7 +661,8 @@ def initialize(consoleLogging=True): # pylint: disable=too-many-locals, too-man AUTOPOSTPROCESSOR_FREQUENCY, SHOWUPDATE_HOUR, \ ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \ ANIME_SPLIT_HOME, SCENE_DEFAULT, DOWNLOAD_URL, BACKLOG_DAYS, GIT_USERNAME, GIT_PASSWORD, \ - DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_LAST_READ, NEWS_LATEST, SOCKET_TIMEOUT, RECENTLY_DELETED + DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_LAST_READ, NEWS_LATEST, SOCKET_TIMEOUT, RECENTLY_DELETED, \ + provider_recent_results if __INITIALIZED__: return False diff --git a/sickbeard/databases/cache_db.py b/sickbeard/databases/cache_db.py index 7a9525ae03..7e5549bb83 100644 --- a/sickbeard/databases/cache_db.py +++ b/sickbeard/databases/cache_db.py @@ -124,13 +124,3 @@ def execute(self): self.connection.action("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);") self.connection.action("INSERT INTO scene_names SELECT * FROM tmp_scene_names;") self.connection.action("DROP TABLE tmp_scene_names;") - - -class AddProviderRssCache(ConvertSceneNamesToIndexerScheme): # pylint:disable=too-many-ancestors - """A provider cache table thats used to keep track of the last parsed search results""" - def test(self): - return self.hasTable("provider_rss_cache") - - def execute(self): - self.connection.action( - "CREATE TABLE provider_rss_cache (rss_cache_id INTEGER PRIMARY KEY, name TEXT, url TEXT, time NUMERIC DEFAULT 0, provider_id TEXT NOT NULL);") diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 5dd5f35bf9..622b699f58 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -114,12 +114,6 @@ def _get_db(self): return self.provider_db - def _clear_provider_rss_cache(self): - cache_db_con = self._get_db() - today = int(time.mktime(datetime.datetime.today().timetuple())) - # Keep item in cache for 7 days - cache_db_con.action('DELETE FROM provider_rss_cache WHERE provider_id = ? AND time < ? 
', [self.provider_id, today - 7 * 86400]) # 86400 POSIX day (exact value) - def _clearCache(self): """ Performs requalar cache cleaning as required @@ -128,7 +122,6 @@ def _clearCache(self): if sickbeard.CACHE_TRIMMING: # trim items older than MAX_CACHE_AGE days self.trim_cache(days=sickbeard.MAX_CACHE_AGE) - self._clear_provider_rss_cache() def trim_cache(self, days=None): """ @@ -195,14 +188,14 @@ def updateCache(self): self.setLastUpdate() # get last 5 provider_rss_cache results - recent_results = self.get_last_cached_items(5) + recent_results = self.provider.recent_results found_recent_results = 0 stop_at = 1 cl = [] index = 0 for index, item in enumerate(data['entries'] or []): - if recent_results and item.get('link').strip() in [cache_item['url'].strip() for cache_item in recent_results]: + if recent_results and item.get('link').strip() in [cached_item['link'].strip() for cached_item in recent_results]: found_recent_results += 1 if found_recent_results >= stop_at: @@ -221,7 +214,7 @@ def updateCache(self): cache_db_con.mass_action(cl) # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 - self._update_provider_rss_cache(data['entries'][0:min(index, 5)]) + self.provider.recent_results = data['entries'][0:min(index, 5)] except AuthException as e: logger.log(u'Authentication error: {0!r}'.format(e), logger.ERROR) @@ -249,28 +242,6 @@ def update_cache_manual_search(self, manual_data=None): return any(results) - def _update_provider_rss_cache(self, items): - """Updates the table provider_rss_cache with a limited amount of the latest search result url's""" - - cache_db_con = self._get_db() - new_items = [] - sql_results = [] - - for item in items: - # get the current timestamp - cur_time = int(time.mktime(datetime.datetime.today().timetuple())) - - logger.log(u"Added provider_rss_cache item: {0}".format(item.get('link'), self.provider_id), logger.DEBUG) - - new_items.append(["INSERT OR REPLACE INTO provider_rss_cache (name, url, time, provider_id) VALUES (?,?,?,?)", - [item.get('title'), item.get('link'), cur_time, self.provider_id]]) - - if new_items: - logger.log(u'Mass updating provider_rss_cache table with results for provider: {0}'.format(self.provider.name), logger.DEBUG) - sql_results = cache_db_con.mass_action(new_items) - - return any(sql_results) - def getRSSFeed(self, url, params=None): if self.provider.login(): return getFeed(url, params=params, request_hook=self.provider.get_url) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 0c8566acdd..aa6a81d22b 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -71,6 +71,7 @@ def __init__(self, name): self.supports_backlog = True self.url = '' self.urls = {} + self.max_recent_items = 5 shuffle(self.bt_cache_urls) @@ -518,3 +519,13 @@ def _make_url(self, result): def _verify_download(self, file_name=None): # pylint: disable=unused-argument,no-self-use return True + + @property + def recent_results(self): + return sickbeard.provider_recent_results.get(self.get_id()) or [] + + @recent_results.setter + def recent_results(self, items): + if not sickbeard.provider_recent_results.get(self.get_id()): + sickbeard.provider_recent_results.update({self.get_id(): []}) + sickbeard.provider_recent_results[self.get_id()] += items From b3c5afcef20f3c17348e6ebbbbf7fdf210a7d514 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 10:09:22 +0200 Subject: [PATCH 014/134] Added options for only saving 
latest 5 (configurable per prov) results per provider. --- sickrage/providers/GenericProvider.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index aa6a81d22b..3edb4f659c 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -522,10 +522,16 @@ def _verify_download(self, file_name=None): # pylint: disable=unused-argument,n @property def recent_results(self): - return sickbeard.provider_recent_results.get(self.get_id()) or [] + if sickbeard.provider_recent_results.get(self.get_id()): + return sickbeard.provider_recent_results.get(self.get_id())[::-1] + else: + return [] @recent_results.setter def recent_results(self, items): if not sickbeard.provider_recent_results.get(self.get_id()): sickbeard.provider_recent_results.update({self.get_id(): []}) sickbeard.provider_recent_results[self.get_id()] += items + if items: + del sickbeard.provider_recent_results[self.get_id()][:len(sickbeard.provider_recent_results[self.get_id()]) - self.max_recent_items] + pass From 6fdd86211739ec3e3191fca32d1840074707890b Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 10:25:58 +0200 Subject: [PATCH 015/134] Comments and variable cleanup --- sickbeard/tvcache.py | 11 ++++++----- sickrage/providers/GenericProvider.py | 1 - 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 622b699f58..7fbb588418 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -187,10 +187,10 @@ def updateCache(self): # set updated self.setLastUpdate() - # get last 5 provider_rss_cache results + # get last 5 rss cache results recent_results = self.provider.recent_results - found_recent_results = 0 - stop_at = 1 + found_recent_results = 0 # A counter that keeps track of the number of items that have been found in cache + stop_at = 3 # Configuration as an error margin, to stop at. The lower the number, the faster it will stop parsing items cl = [] index = 0 @@ -213,8 +213,9 @@ def updateCache(self): if cl: cache_db_con.mass_action(cl) - # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 - self.provider.recent_results = data['entries'][0:min(index, 5)] + # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 + # (overwritable per provider, throug hthe max_recent_items attribute. + self.provider.recent_results = data['entries'][0:min(index, self.provider.max_recent_items)] except AuthException as e: logger.log(u'Authentication error: {0!r}'.format(e), logger.ERROR) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 3edb4f659c..bebe33237c 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -534,4 +534,3 @@ def recent_results(self, items): sickbeard.provider_recent_results[self.get_id()] += items if items: del sickbeard.provider_recent_results[self.get_id()][:len(sickbeard.provider_recent_results[self.get_id()]) - self.max_recent_items] - pass From 3576271d5936e54bd3ff9ada34ea40a4c878fe0f Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 16:27:10 +0200 Subject: [PATCH 016/134] Removed the inverse sorting * Also fixed a bug where already existing url's where added to the list, when the stop_at paramater is set to > 1. This because the 2 results it neglects, are still attempted to be added to the list. Now it isn't. 
Keeping the list unique. --- sickbeard/tvcache.py | 2 +- sickrage/providers/GenericProvider.py | 15 ++++++++++----- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 7fbb588418..6c12ce3d23 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -195,7 +195,7 @@ def updateCache(self): cl = [] index = 0 for index, item in enumerate(data['entries'] or []): - if recent_results and item.get('link').strip() in [cached_item['link'].strip() for cached_item in recent_results]: + if recent_results and item['link'] in {cache_item['link'] for cache_item in recent_results}: found_recent_results += 1 if found_recent_results >= stop_at: diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index bebe33237c..45b09e9042 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -523,14 +523,19 @@ def _verify_download(self, file_name=None): # pylint: disable=unused-argument,n @property def recent_results(self): if sickbeard.provider_recent_results.get(self.get_id()): - return sickbeard.provider_recent_results.get(self.get_id())[::-1] + return sickbeard.provider_recent_results.get(self.get_id()) else: return [] @recent_results.setter def recent_results(self, items): - if not sickbeard.provider_recent_results.get(self.get_id()): - sickbeard.provider_recent_results.update({self.get_id(): []}) - sickbeard.provider_recent_results[self.get_id()] += items + recent_results = sickbeard.provider_recent_results + if not recent_results.get(self.get_id()): + recent_results.update({self.get_id(): []}) if items: - del sickbeard.provider_recent_results[self.get_id()][:len(sickbeard.provider_recent_results[self.get_id()]) - self.max_recent_items] + add_to_list = [] + for item in items: + if item['link'] not in {cache_item['link'] for cache_item in recent_results[self.get_id()]}: + add_to_list += [item] + recent_results[self.get_id()] = add_to_list + recent_results[self.get_id()] + recent_results[self.get_id()][:self.max_recent_items] From 243b562cbbcc616dcddfdbb36b9339aac0c5f293 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 19:39:04 +0200 Subject: [PATCH 017/134] Fixed some dumb coding mistakes. --- sickbeard/tvcache.py | 7 +++---- sickrage/providers/GenericProvider.py | 12 ++++++------ 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 6c12ce3d23..aaeee5dfc2 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -190,15 +190,14 @@ def updateCache(self): # get last 5 rss cache results recent_results = self.provider.recent_results found_recent_results = 0 # A counter that keeps track of the number of items that have been found in cache - stop_at = 3 # Configuration as an error margin, to stop at. 
The lower the number, the faster it will stop parsing items cl = [] index = 0 for index, item in enumerate(data['entries'] or []): - if recent_results and item['link'] in {cache_item['link'] for cache_item in recent_results}: + if item['link'] in {cache_item['link'] for cache_item in recent_results}: found_recent_results += 1 - if found_recent_results >= stop_at: + if found_recent_results >= self.provider.stop_at: logger.log(u'Hit the old cached items, not parsing any more for: {0}'.format (self.provider_id), logger.ERROR) break @@ -206,7 +205,7 @@ def updateCache(self): ci = self._parseItem(item) if ci is not None: cl.append(ci) - except UnicodeDecodeError, e: + except UnicodeDecodeError: continue cache_db_con = self._get_db() diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 45b09e9042..b54ad790fd 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -71,7 +71,10 @@ def __init__(self, name): self.supports_backlog = True self.url = '' self.urls = {} + + # Paramaters for reducting the daily search results parsing self.max_recent_items = 5 + self.stop_at = 3 shuffle(self.bt_cache_urls) @@ -522,10 +525,7 @@ def _verify_download(self, file_name=None): # pylint: disable=unused-argument,n @property def recent_results(self): - if sickbeard.provider_recent_results.get(self.get_id()): - return sickbeard.provider_recent_results.get(self.get_id()) - else: - return [] + return sickbeard.provider_recent_results.get(self.get_id(), []) @recent_results.setter def recent_results(self, items): @@ -537,5 +537,5 @@ def recent_results(self, items): for item in items: if item['link'] not in {cache_item['link'] for cache_item in recent_results[self.get_id()]}: add_to_list += [item] - recent_results[self.get_id()] = add_to_list + recent_results[self.get_id()] - recent_results[self.get_id()][:self.max_recent_items] + results = add_to_list + recent_results[self.get_id()] + recent_results[self.get_id()] = results[:self.max_recent_items] From 5a02f80bd6b69047d5a00a6e71c09c15ebb51094 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 19:45:12 +0200 Subject: [PATCH 018/134] Added Warning message, for UnicodeError in parsing. --- sickbeard/tvcache.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index aaeee5dfc2..5bd7d96fb2 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -205,8 +205,9 @@ def updateCache(self): ci = self._parseItem(item) if ci is not None: cl.append(ci) - except UnicodeDecodeError: - continue + except UnicodeDecodeError as e: + logger.log(u'Unicode decoding error, missed parsing item from provider {0}: {1!r}'.format + (self.provider.name, e), logger.WARNING) cache_db_con = self._get_db() if cl: From 79131d1cae7938190077f19cdbb565680eb18df3 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 27 May 2016 20:08:56 +0200 Subject: [PATCH 019/134] Missed the _getDB snakecase rename. 
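
Taken together, the tvcache/GenericProvider changes above boil down to one idea: remember a handful of recently seen feed items per provider, stop parsing the daily RSS feed once a few of those links are seen again, and keep the remembered list unique and capped. The following is a minimal, self-contained sketch of that idea only; `SimpleProvider` and `scan_feed` are illustrative stand-ins rather than the project's actual TVCache/GenericProvider code, although the `recent_results`, `stop_at` and `max_recent_items` names mirror the patches.

class SimpleProvider(object):
    """Illustrative stand-in for a provider (sketch only)."""
    def __init__(self):
        self.recent_results = []   # newest first, identified by item['link']
        self.max_recent_items = 5  # how many links to remember per provider
        self.stop_at = 3           # error margin before aborting the scan

def scan_feed(provider, entries):
    """Parse feed entries until `stop_at` already-cached links reappear."""
    known_links = {item['link'] for item in provider.recent_results}
    hits = 0
    parsed = []
    index = 0
    for index, item in enumerate(entries):
        if item['link'] in known_links:
            hits += 1
            if hits >= provider.stop_at:
                break  # reached the items cached on the previous run
        parsed.append(item)
    # Remember only the newest links, deduplicated and capped.
    fresh = [i for i in entries[:index + 1] if i['link'] not in known_links]
    provider.recent_results = (fresh + provider.recent_results)[:provider.max_recent_items]
    return parsed

if __name__ == '__main__':
    p = SimpleProvider()
    feed = [{'link': 'magnet:?xt=urn:btih:%d' % n} for n in range(10)]
    scan_feed(p, feed)               # first run caches the newest 5 links
    print(len(scan_feed(p, feed)))   # second run stops early after 3 cache hits
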
--- sickbeard/providers/binsearch.py | 2 +- sickbeard/providers/womble.py | 2 +- sickrage/providers/GenericProvider.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/binsearch.py b/sickbeard/providers/binsearch.py index fc48e7e191..30d8cd9cd6 100644 --- a/sickbeard/providers/binsearch.py +++ b/sickbeard/providers/binsearch.py @@ -114,7 +114,7 @@ def updateCache(self): cl.append(ci) if cl: - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.mass_action(cl) def _checkAuth(self, data): diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py index f5333c6ae4..7158e3da80 100644 --- a/sickbeard/providers/womble.py +++ b/sickbeard/providers/womble.py @@ -66,7 +66,7 @@ def updateCache(self): cl.append(ci) if cl: - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.mass_action(cl) def _checkAuth(self, data): diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index b54ad790fd..5c62009501 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -327,7 +327,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, if cl: # pylint: disable=protected-access # Access to a protected member of a client class - db = self.cache._getDB() + db = self.cache._get_db() db.mass_action(cl) return results From 4acef226ce776c23596183d825d70e6dfd1e9ff8 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 29 May 2016 21:39:04 +0200 Subject: [PATCH 020/134] Moved global to GenericProvider, changed var name. --- sickbeard/__init__.py | 5 +---- sickrage/providers/GenericProvider.py | 7 +++++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 65f1326d99..809da9f5eb 100644 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -606,8 +606,6 @@ PRIVACY_LEVEL = 'normal' -provider_recent_results = {} - def get_backlog_cycle_time(): cycletime = DAILYSEARCH_FREQUENCY * 2 + 7 @@ -661,8 +659,7 @@ def initialize(consoleLogging=True): # pylint: disable=too-many-locals, too-man AUTOPOSTPROCESSOR_FREQUENCY, SHOWUPDATE_HOUR, \ ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \ ANIME_SPLIT_HOME, SCENE_DEFAULT, DOWNLOAD_URL, BACKLOG_DAYS, GIT_USERNAME, GIT_PASSWORD, \ - DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_LAST_READ, NEWS_LATEST, SOCKET_TIMEOUT, RECENTLY_DELETED, \ - provider_recent_results + DEVELOPER, gh, DISPLAY_ALL_SEASONS, SSL_VERIFY, NEWS_LAST_READ, NEWS_LATEST, SOCKET_TIMEOUT, RECENTLY_DELETED if __INITIALIZED__: return False diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 5c62009501..a595e97682 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -39,6 +39,10 @@ from sickrage.helper.exceptions import ex +# Keep a list of per provider of recent provider search results +recent_results = {} + + class GenericProvider(object): # pylint: disable=too-many-instance-attributes NZB = 'nzb' TORRENT = 'torrent' @@ -525,11 +529,10 @@ def _verify_download(self, file_name=None): # pylint: disable=unused-argument,n @property def recent_results(self): - return sickbeard.provider_recent_results.get(self.get_id(), []) + return recent_results.get(self.get_id(), []) @recent_results.setter def recent_results(self, items): - recent_results = sickbeard.provider_recent_results if not 
recent_results.get(self.get_id()): recent_results.update({self.get_id(): []}) if items: From d53f1da4d2eaa8a6c8cb8fe74062fed86bd4c0a5 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 30 May 2016 09:39:14 +0200 Subject: [PATCH 021/134] This should in no way be an error ofcourse --- sickbeard/tvcache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 5bd7d96fb2..3dae8f2bf7 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -199,7 +199,7 @@ def updateCache(self): if found_recent_results >= self.provider.stop_at: logger.log(u'Hit the old cached items, not parsing any more for: {0}'.format - (self.provider_id), logger.ERROR) + (self.provider_id), logger.DEBUG) break try: ci = self._parseItem(item) From a7388c56ef316948ea5f1f419ff865418fee2ca3 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 3 Jun 2016 16:11:19 +0200 Subject: [PATCH 022/134] Moved seeders sorting out of providers code and into the sickbeard/search.py searchProviders() code. * Removed the lambda sort from all providers * Corrected bug introducted in TVCache * Removed bogus condition from GenericProvider. That condition can never be true. --- sickbeard/providers/abnormal.py | 2 -- sickbeard/providers/alpharatio.py | 2 -- sickbeard/providers/bitcannon.py | 2 -- sickbeard/providers/bitsnoop.py | 2 -- sickbeard/providers/bluetigers.py | 3 --- sickbeard/providers/cpasbien.py | 2 -- sickbeard/providers/danishbits.py | 2 -- sickbeard/providers/elitetorrent.py | 3 --- sickbeard/providers/extratorrent.py | 2 -- sickbeard/providers/freshontv.py | 2 -- sickbeard/providers/gftracker.py | 2 -- sickbeard/providers/hd4free.py | 5 +---- sickbeard/providers/hdspace.py | 2 -- sickbeard/providers/hdtorrents.py | 3 --- sickbeard/providers/hounddawgs.py | 3 --- sickbeard/providers/ilovetorrents.py | 3 --- sickbeard/providers/iptorrents.py | 3 --- sickbeard/providers/kat.py | 3 --- sickbeard/providers/limetorrents.py | 3 --- sickbeard/providers/morethantv.py | 2 -- sickbeard/providers/norbits.py | 2 -- sickbeard/providers/nyaatorrents.py | 2 -- sickbeard/providers/pretome.py | 3 --- sickbeard/providers/rarbg.py | 2 -- sickbeard/providers/scc.py | 3 --- sickbeard/providers/sceneelite.py | 6 ++---- sickbeard/providers/scenetime.py | 3 --- sickbeard/providers/speedcd.py | 2 -- sickbeard/providers/t411.py | 3 --- sickbeard/providers/thepiratebay.py | 2 -- sickbeard/providers/tntvillage.py | 3 --- sickbeard/providers/tokyotoshokan.py | 2 -- sickbeard/providers/torrentbytes.py | 2 -- sickbeard/providers/torrentday.py | 2 -- sickbeard/providers/torrentleech.py | 2 -- sickbeard/providers/torrentproject.py | 2 -- sickbeard/providers/torrentz.py | 2 -- sickbeard/providers/transmitthenet.py | 2 -- sickbeard/providers/tvchaosuk.py | 2 -- sickbeard/providers/xthor.py | 2 -- sickbeard/search.py | 4 ++++ sickbeard/tvcache.py | 4 ++-- sickrage/providers/GenericProvider.py | 8 ++------ 43 files changed, 11 insertions(+), 105 deletions(-) diff --git a/sickbeard/providers/abnormal.py b/sickbeard/providers/abnormal.py index c22ea6255a..d7e791ecb0 100644 --- a/sickbeard/providers/abnormal.py +++ b/sickbeard/providers/abnormal.py @@ -158,8 +158,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/alpharatio.py 
b/sickbeard/providers/alpharatio.py index e30c2f537a..d85e9faba8 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -171,8 +171,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/bitcannon.py b/sickbeard/providers/bitcannon.py index 4d9b4a4f23..66aca08fa2 100644 --- a/sickbeard/providers/bitcannon.py +++ b/sickbeard/providers/bitcannon.py @@ -110,8 +110,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError, KeyError, ValueError): continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py index 7197a72401..c9d98a17f8 100644 --- a/sickbeard/providers/bitsnoop.py +++ b/sickbeard/providers/bitsnoop.py @@ -118,8 +118,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError, KeyError, ValueError): logger.log(u"Failed parsing provider. Traceback: %r" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py index 867a8f1184..487668f1a9 100644 --- a/sickbeard/providers/bluetigers.py +++ b/sickbeard/providers/bluetigers.py @@ -136,9 +136,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py index ec582029d9..7802a4c420 100644 --- a/sickbeard/providers/cpasbien.py +++ b/sickbeard/providers/cpasbien.py @@ -91,8 +91,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py index bcde3185d0..56b414e540 100644 --- a/sickbeard/providers/danishbits.py +++ b/sickbeard/providers/danishbits.py @@ -171,8 +171,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py index 9809c08ae2..c0720c8295 100644 --- a/sickbeard/providers/elitetorrent.py +++ b/sickbeard/providers/elitetorrent.py @@ -132,9 +132,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except Exception: logger.log(u"Failed parsing provider. 
Traceback: %s" % traceback.format_exc(), logger.WARNING) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index 7b05cf9163..4764ffbb2b 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -110,8 +110,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index e30f3f3587..95ce723fdc 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -227,8 +227,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index e82a3b6928..85c78b4eba 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -179,8 +179,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index c69890595e..27d94860ea 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -70,7 +70,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params.pop('fl', '') if mode != 'RSS': - logger.log(u"Search string: " + search_string.strip(), logger.DEBUG) + logger.log(u"Search string: {0}".format(search_string), logger.DEBUG) search_params['search'] = search_string else: search_params.pop('search', '') @@ -122,9 +122,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index 3a5156393e..24f26183a0 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -162,8 +162,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError, KeyError, ValueError): continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index b61d69b2bc..65334f8789 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -170,9 +170,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: 
try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py index bbdf2953e1..c0d0486e93 100644 --- a/sickbeard/providers/hounddawgs.py +++ b/sickbeard/providers/hounddawgs.py @@ -176,9 +176,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/ilovetorrents.py b/sickbeard/providers/ilovetorrents.py index 9d1824ff14..0e6a027b93 100644 --- a/sickbeard/providers/ilovetorrents.py +++ b/sickbeard/providers/ilovetorrents.py @@ -158,9 +158,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except Exception: logger.log(u"Failed parsing provider. Traceback: {0}".format(traceback.format_exc()), logger.WARNING) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index 0f029ab1f6..7b3b178863 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -157,9 +157,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many except Exception as e: logger.log(u"Failed parsing provider. Error: %r" % ex(e), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index bd74f40149..7d93428388 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -132,9 +132,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError, KeyError, ValueError): continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py index 2eae5c3cf1..8e8e1dd798 100644 --- a/sickbeard/providers/limetorrents.py +++ b/sickbeard/providers/limetorrents.py @@ -180,9 +180,6 @@ def parse(self, data, mode): (traceback.format_exc()), logger.ERROR) continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - return items diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py index 3806e71442..fd3647286d 100644 --- a/sickbeard/providers/morethantv.py +++ b/sickbeard/providers/morethantv.py @@ -181,8 +181,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py index 6ca871c9cf..15983351d5 100644 --- a/sickbeard/providers/norbits.py +++ b/sickbeard/providers/norbits.py @@ -129,8 +129,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many title, 
seeders, leechers), logger.DEBUG) items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index bf04548b4b..d461ec56ab 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -114,8 +114,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index abcdc3f12f..0f2c9cb82e 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -161,9 +161,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index f0e0aea7ab..67fbbddc7d 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -176,8 +176,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 96d813e513..652dd77de2 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -159,9 +159,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/sceneelite.py b/sickbeard/providers/sceneelite.py index c038588ccd..3c7f987e70 100644 --- a/sickbeard/providers/sceneelite.py +++ b/sickbeard/providers/sceneelite.py @@ -137,15 +137,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if mode != "RSS": logger.log("Found result: {0} with {1} seeders and {2} leechers".format - (title, seeders, leechers), logger.DEBUG) + (title, seeders, leechers), logger.DEBUG) items.append(item) except StandardError: continue - - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) + results += items return results diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 73c34a3ea7..496c4f500b 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -142,9 +142,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 3caf39bf51..490f520759 100644 --- 
a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -172,8 +172,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 4207044fe7..fe5f9ddb1e 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -154,9 +154,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index 9826e78154..c7a89111a6 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -163,8 +163,6 @@ def process_column_header(th): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 049accc078..8ccb4de307 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -395,9 +395,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many except Exception: logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) - results += items return results diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index 19a0c54713..a935ddd9a7 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -112,8 +112,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index 399a0608ff..00d1a51fb3 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -154,8 +154,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError): continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index 267ec87779..a40958def1 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -166,8 +166,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many items.append(item) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py 
index 56fa9a4eb2..496cb68883 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -169,8 +169,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py index 64ece6ab04..a7ef07e76b 100644 --- a/sickbeard/providers/torrentproject.py +++ b/sickbeard/providers/torrentproject.py @@ -130,8 +130,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (traceback.format_exc()), logger.ERROR) continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py index af8bfd4893..c9cad22098 100644 --- a/sickbeard/providers/torrentz.py +++ b/sickbeard/providers/torrentz.py @@ -112,8 +112,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: logger.log(u"Failed parsing provider. Traceback: %r" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py index 92b3065da9..9113244236 100644 --- a/sickbeard/providers/transmitthenet.py +++ b/sickbeard/providers/transmitthenet.py @@ -174,8 +174,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except Exception: logger.log(u"Failed parsing provider. 
Traceback: %s" % traceback.format_exc(), logger.ERROR) - # For each search mode sort all the items by seeders - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index e03564b198..0e1006e916 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -168,8 +168,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py index d4feb83ada..e3e7c4a2be 100644 --- a/sickbeard/providers/xthor.py +++ b/sickbeard/providers/xthor.py @@ -184,8 +184,6 @@ def process_column_header(td): except StandardError: continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/search.py b/sickbeard/search.py index 89fe022186..1457d8f805 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -584,6 +584,10 @@ def searchProviders(show, episodes, forced_search=False, downCurQuality=False, m else: foundResults[cur_provider.name][curEp] = searchResults[curEp] + # Sort the list by seeders if possible + if cur_provider.provider_type == 'torrent': + foundResults[cur_provider.name][curEp].sort(key=lambda d: int(d.seeders), reverse=True) + break elif not cur_provider.search_fallback or searchCount == 2: break diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 3dae8f2bf7..adcfddd940 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -344,7 +344,7 @@ def shouldClearCache(self): return False - def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): + def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, torrent_hash, parse_result=None, indexer_id=0): try: parse_result = NameParser().parse(name) @@ -383,7 +383,7 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, hash): 'INSERT OR REPLACE INTO [{0}] (name, season, episodes, indexerid, url, time, quality, release_group, ' 'version, seeders, leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(self.provider_id), [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, - release_group, version, seeders, leechers, size, pubdate, hash]] + release_group, version, seeders, leechers, size, pubdate, torrent_hash]] def searchCache(self, episode, forced_search=False, downCurQuality=False): neededEps = self.findNeededEpisodes(episode, forced_search, downCurQuality) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index a595e97682..96f62875e5 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -140,10 +140,6 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, continue - # NOTE: searched_scene_season is always None? 
- if (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly' and searched_scene_season == episode.scene_season: - continue - search_strings = [] searched_scene_season = episode.scene_season @@ -183,7 +179,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, (seeders, leechers) = self._get_result_info(item) size = self._get_size(item) pubdate = self._get_pubdate(item) - hash = self._get_hash(item) + torrent_hash = self._get_hash(item) try: parse_result = NameParser(parse_method=('normal', 'anime')[show.is_anime]).parse(title) @@ -272,7 +268,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, logger.log(u'Adding item from search to cache: %s' % title, logger.DEBUG) # pylint: disable=protected-access # Access to a protected member of a client class - ci = self.cache._addCacheEntry(title, url, seeders, leechers, size, pubdate, hash, parse_result=parse_result) + ci = self.cache._addCacheEntry(title, url, seeders, leechers, size, pubdate, torrent_hash, parse_result=parse_result) if ci is not None: cl.append(ci) From 1cf6622529d1cd53125d789cc2a2a86694a83f08 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 4 Jun 2016 20:14:12 +0200 Subject: [PATCH 023/134] Standardize first 10 providers --- sickbeard/providers/abnormal.py | 41 ++++++---- sickbeard/providers/alpharatio.py | 123 +++++++++++++++-------------- sickbeard/providers/anizb.py | 28 ++++--- sickbeard/providers/binsearch.py | 13 ++-- sickbeard/providers/bitcannon.py | 87 ++++++++++++--------- sickbeard/providers/bitsnoop.py | 53 ++++++++----- sickbeard/providers/bluetigers.py | 56 +++++++++----- sickbeard/providers/btdigg.py | 102 +++++++++++++----------- sickbeard/providers/btn.py | 124 +++++++++++++++--------------- sickbeard/providers/cpasbien.py | 52 ++++++++----- 10 files changed, 387 insertions(+), 292 deletions(-) diff --git a/sickbeard/providers/abnormal.py b/sickbeard/providers/abnormal.py index d7e791ecb0..d550cbad92 100644 --- a/sickbeard/providers/abnormal.py +++ b/sickbeard/providers/abnormal.py @@ -1,26 +1,26 @@ # coding=utf-8 # Author: adaur # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
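
For context on patch 022 above: the per-provider lambda sorts were dropped in favour of a single ordering step in searchProviders(), which sorts each episode's candidate list by seeder count, descending, and only for torrent providers. A hedged sketch of that ordering step follows; the lightweight result objects here are assumptions for illustration, not the project's SearchResult class.

from collections import namedtuple

# Hypothetical, simplified stand-in for a provider search result.
Result = namedtuple('Result', ['name', 'seeders', 'provider_type'])

def order_by_seeders(results):
    """Sort torrent results so the best-seeded release is considered first."""
    if all(r.provider_type == 'torrent' for r in results):
        return sorted(results, key=lambda r: int(r.seeders), reverse=True)
    return results  # NZB results carry no seeder count, leave them as-is

candidates = [
    Result('Show.S01E01.720p-GrpA', 12, 'torrent'),
    Result('Show.S01E01.720p-GrpB', 87, 'torrent'),
]
assert order_by_seeders(candidates)[0].name.endswith('GrpB')
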
from __future__ import unicode_literals import re +import traceback + from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -86,7 +86,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Search Params search_params = { - 'cat[]': ['TV|SD|VOSTFR', 'TV|HD|VOSTFR', 'TV|SD|VF', 'TV|HD|VF', 'TV|PACK|FR', 'TV|PACK|VOSTFR', 'TV|EMISSIONS', 'ANIME'], + 'cat[]': ['TV|SD|VOSTFR', 'TV|HD|VOSTFR', 'TV|SD|VF', 'TV|HD|VF', + 'TV|PACK|FR', 'TV|PACK|VOSTFR', 'TV|EMISSIONS', 'ANIME'], # Both ASC and DESC are available for sort direction 'way': 'DESC' } @@ -96,12 +97,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size @@ -115,7 +116,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_table = html.find(class_='torrent_table') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -131,7 +132,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man try: title = cells[labels.index('Release')].get_text(strip=True) - download_url = urljoin(self.url, cells[labels.index('DL')].find('a', class_='tooltip')['href']) + download = cells[labels.index('DL')].find('a', class_='tooltip')['href'] + download_url = urljoin(self.url, download) if not all([title, download_url]): continue @@ -141,7 +143,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Discarding torrent because it doesn\'t meet the minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the" + 'minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue @@ -149,13 +152,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_size = cells[size_index].get_text() size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index d85e9faba8..5306d4a247 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -1,25 +1,25 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals + import re +import traceback from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -36,7 +36,7 @@ class AlphaRatioProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): # Provider Init - TorrentProvider.__init__(self, "AlphaRatio") + TorrentProvider.__init__(self, 'AlphaRatio') # Credentials self.username = None @@ -47,14 +47,14 @@ def __init__(self): self.minleech = None # URLs - self.url = "http://alpharatio.cc" + self.url = 'http://alpharatio.cc' self.urls = { - "login": urljoin(self.url, "login.php"), - "search": urljoin(self.url, "torrents.php"), + 'login': urljoin(self.url, 'login.php'), + 'search': urljoin(self.url, 'torrents.php'), } # Proper Strings - self.proper_strings = ["PROPER", "REPACK"] + self.proper_strings = ['PROPER', 'REPACK'] # Cache self.cache = tvcache.TVCache(self) @@ -64,20 +64,20 @@ def login(self): return True login_params = { - "username": self.username, - "password": self.password, - "login": "submit", - "remember_me": "on", + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'remember_me': 'on', } - response = self.get_url(self.urls["login"], post_data=login_params, returns="text") + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log("Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False - if re.search("Invalid Username/password", response) \ - or re.search("Login :: AlphaRatio.cc", response): - logger.log("Invalid username or password. Check your settings", logger.WARNING) + if re.search('Invalid Username/password', response) \ + or re.search('Login :: AlphaRatio.cc', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -89,86 +89,97 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Search Params search_params = { - "searchstr": "", - "filter_cat[1]": 1, - "filter_cat[2]": 1, - "filter_cat[3]": 1, - "filter_cat[4]": 1, - "filter_cat[5]": 1 + 'searchstr': '', + 'filter_cat[1]': 1, + 'filter_cat[2]': 1, + 'filter_cat[3]': 1, + 'filter_cat[4]': 1, + 'filter_cat[5]': 1 } # Units - units = ["B", "KB", "MB", "GB", "TB", "PB"] + units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] def process_column_header(td): - result = "" + result = '' if td.a and td.a.img: - result = td.a.img.get("title", td.a.get_text(strip=True)) + result = td.a.img.get('title', td.a.get_text(strip=True)) if not result: result = td.get_text(strip=True) return result for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if mode != "RSS": - logger.log("Search string: {search}".format - (search=search_string.decode("utf-8")), logger.DEBUG) + if mode != 'RSS': + logger.log('Search string: {search}'.format + (search=search_string.decode('utf-8')), logger.DEBUG) - search_params["searchstr"] = search_string - search_url = self.urls["search"] - data = self.get_url(search_url, params=search_params, returns="text") + search_params['searchstr'] = search_string + search_url = self.urls['search'] + data = self.get_url(search_url, params=search_params, returns='text') if not data: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue - with BS4Parser(data, "html5lib") as html: - torrent_table = html.find("table", id="torrent_table") - torrent_rows = torrent_table("tr") if torrent_table else [] + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', id='torrent_table') + torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue - # "", "", "Name /Year", "Files", "Time", "Size", "Snatches", "Seeders", "Leechers" - labels = [process_column_header(label) for label in torrent_rows[0]("td")] + # '', '', 'Name /Year', 'Files', 'Time', 'Size', 'Snatches', 'Seeders', 'Leechers' + labels = [process_column_header(label) for label in torrent_rows[0]('td')] # Skip column headers for result in torrent_rows[1:]: - cells = result("td") + cells = result('td') if len(cells) < len(labels): continue try: - title = cells[labels.index("Name /Year")].find("a", dir="ltr").get_text(strip=True) - download_url = urljoin(self.url, cells[labels.index("Name /Year")].find("a", title="Download")["href"]) + title = cells[labels.index('Name /Year')].find('a', dir='ltr').get_text(strip=True) + download = cells[labels.index('Name /Year')].find('a', title='Download')['href'] + download_url = urljoin(self.url, download) if not all([title, download_url]): continue - seeders = try_int(cells[labels.index("Seeders")].get_text(strip=True)) - leechers = try_int(cells[labels.index("Leechers")].get_text(strip=True)) + seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True)) + leechers = 
try_int(cells[labels.index('Leechers')].get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): - if mode != "RSS": + if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1})".format + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - torrent_size = cells[labels.index("Size")].get_text(strip=True) + torrent_size = cells[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py index 91bca698fa..5a4ec03068 100644 --- a/sickbeard/providers/anizb.py +++ b/sickbeard/providers/anizb.py @@ -16,21 +16,26 @@ # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . +from __future__ import unicode_literals + import traceback from sickbeard import logger, tvcache + from sickrage.providers.nzb.NZBProvider import NZBProvider from sickrage.helper.common import try_int + from requests.compat import urljoin + from bs4 import BeautifulSoup class Anizb(NZBProvider): # pylint: disable=too-many-instance-attributes - """Nzb Provider using the open api of anizb.org for daily (rss) and backlog/forced searches""" + '''Nzb Provider using the open api of anizb.org for daily (rss) and backlog/forced searches''' def __init__(self): # Provider Init - NZBProvider.__init__(self, "Anizb") + NZBProvider.__init__(self, 'Anizb') self.public = True self.supports_absolute_numbering = True @@ -47,24 +52,22 @@ def __init__(self): self.cache = tvcache.TVCache(self) def _get_size(self, item): - """Override the default _get_size to prevent it from extracting using it the default tags""" + '''Override the default _get_size to prevent it from extracting using it the default tags''' return try_int(item.get('size')) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals - """Start searching for anime using the provided search_strings. Used for backlog and daily""" - _ = age - _ = ep_obj + '''Start searching for anime using the provided search_strings. 
Used for backlog and daily''' results = [] if self.show and not self.show.is_anime: return results for mode in search_strings: - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) try: @@ -72,7 +75,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log(u'No data returned from provider', logger.DEBUG) continue if not data.startswith(' # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import re + from requests.compat import urljoin from sickbeard import logger, tvcache @@ -44,6 +43,7 @@ def __init__(self): class BinSearchCache(tvcache.TVCache): + def __init__(self, provider_obj, **kwargs): kwargs.pop('search_params', None) # does not use _getRSSData so strip param from kwargs... search_params = None # ...and pass None instead @@ -120,4 +120,5 @@ def updateCache(self): def _checkAuth(self, data): return data if data['feed'] and data['feed']['title'] != 'Invalid Link' else None + provider = BinSearchProvider() diff --git a/sickbeard/providers/bitcannon.py b/sickbeard/providers/bitcannon.py index 66aca08fa2..a7abf41ae2 100644 --- a/sickbeard/providers/bitcannon.py +++ b/sickbeard/providers/bitcannon.py @@ -1,27 +1,27 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals -from requests.compat import urljoin import validators +import traceback + +from requests.compat import urljoin from sickbeard import logger, tvcache @@ -33,82 +33,92 @@ class BitCannonProvider(TorrentProvider): def __init__(self): - TorrentProvider.__init__(self, "BitCannon") + TorrentProvider.__init__(self, 'BitCannon') self.minseed = None self.minleech = None self.custom_url = None self.api_key = None - self.cache = tvcache.TVCache(self, search_params={"RSS": ["tv", "anime"]}) + self.cache = tvcache.TVCache(self, search_params={'RSS': ['tv', 'anime']}) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals results = [] - url = "http://localhost:3000/" + url = 'http://localhost:3000/' if self.custom_url: if not validators.url(self.custom_url, require_tld=False): - logger.log("Invalid custom url set, please check your settings", logger.WARNING) + logger.log('Invalid custom url set, please check your settings', logger.WARNING) return results url = self.custom_url search_params = {} anime = ep_obj and ep_obj.show and ep_obj.show.anime - search_params["category"] = ("tv", "anime")[bool(anime)] + search_params['category'] = ('tv', 'anime')[bool(anime)] if self.api_key: - search_params["apiKey"] = self.api_key + search_params['apiKey'] = self.api_key for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - search_params["q"] = search_string - if mode != "RSS": - logger.log("Search string: {}".format(search_string), logger.DEBUG) + search_params['q'] = search_string + if mode != 'RSS': + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - search_url = urljoin(url, "api/search") - parsed_json = self.get_url(search_url, params=search_params, returns="json") + search_url = urljoin(url, 'api/search') + parsed_json = self.get_url(search_url, params=search_params, returns='json') if not parsed_json: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if not self._check_auth_from_data(parsed_json): return results - for result in parsed_json.pop("torrents", {}): + for result in parsed_json.pop('torrents', {}): try: - title = result.pop("title", "") + title = result.pop('title', '') - info_hash = result.pop("infoHash", "") - download_url = "magnet:?xt=urn:btih:" + info_hash + info_hash = result.pop('infoHash', '') + download_url = 'magnet:?xt=urn:btih:' + info_hash if not all([title, download_url, info_hash]): continue - swarm = result.pop("swarm", None) + swarm = result.pop('swarm', None) if swarm: - seeders = try_int(swarm.pop("seeders", 0)) - leechers = try_int(swarm.pop("leechers", 0)) + seeders = try_int(swarm.pop('seeders', 0)) + leechers = try_int(swarm.pop('leechers', 0)) else: seeders = leechers = 0 if seeders < min(self.minseed, 1): - if mode != "RSS": + if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - "minimum seeders: {0}. Seeders: {1})".format + 'minimum seeders: {0}. 
Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - size = convert_size(result.pop("size", -1)) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + size = convert_size(result.pop('size', -1)) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except (AttributeError, TypeError, KeyError, ValueError): - continue + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) results += items @@ -117,12 +127,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man @staticmethod def _check_auth_from_data(data): if not all([isinstance(data, dict), - data.pop("status", 200) != 401, - data.pop("message", "") != "Invalid API key"]): + data.pop('status', 200) != 401, + data.pop('message', '') != 'Invalid API key']): - logger.log("Invalid api key. Check your settings", logger.WARNING) + logger.log('Invalid api key. Check your settings', logger.WARNING) return False return True + provider = BitCannonProvider() diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py index c9d98a17f8..4522a8d437 100644 --- a/sickbeard/providers/bitsnoop.py +++ b/sickbeard/providers/bitsnoop.py @@ -1,27 +1,28 @@ # coding=utf-8 # Author: Gonçalo M. (aka duramato/supergonkas) # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
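
The provider standardization in patch 023 also settles on one result shape (title, link, size, seeders, leechers, pubdate, hash) and one minimum-seeders gate shared by the torrent providers. Below is a minimal sketch of that convention, assuming the parsed values are already at hand; the build_item helper and its min_seeders argument are hypothetical names used only for illustration, not the provider API.

def build_item(title, download_url, seeders, leechers, size, min_seeders=1):
    """Return the standardized result dict, or None if the torrent is unseeded."""
    if not all([title, download_url]):
        return None
    if seeders < min(min_seeders, 1):
        # Mirrors the providers' "doesn't meet the minimum seeders" filter.
        return None
    return {
        'title': title,
        'link': download_url,
        'size': size,
        'seeders': seeders,
        'leechers': leechers,
        'pubdate': None,
        'hash': None,
    }

item = build_item('Show.S01E02.1080p-Grp', 'magnet:?xt=urn:btih:abc', 42, 7, 1460288880)
assert item and item['seeders'] == 42
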
+ +from __future__ import unicode_literals import traceback +import sickbeard + from bs4 import BeautifulSoup -import sickbeard from sickbeard import logger, tvcache from sickrage.helper.common import convert_size, try_int @@ -32,7 +33,7 @@ class BitSnoopProvider(TorrentProvider): # pylint: disable=too-many-instance-at def __init__(self): - TorrentProvider.__init__(self, "BitSnoop") + TorrentProvider.__init__(self, 'BitSnoop') self.urls = { 'index': 'http://bitsnoop.com', @@ -52,13 +53,14 @@ def __init__(self): def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches,too-many-locals results = [] + for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) try: @@ -66,11 +68,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if not data.startswith(' # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re -from requests.utils import dict_from_cookiejar import traceback +from requests.utils import dict_from_cookiejar + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser + from sickrage.helper.common import try_int from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -32,7 +34,7 @@ class BlueTigersProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): - TorrentProvider.__init__(self, "BLUETIGERS") + TorrentProvider.__init__(self, 'BLUETIGERS') self.username = None self.password = None @@ -48,7 +50,7 @@ def __init__(self): } self.search_params = { - "c16": 1, "c10": 1, "c130": 1, "c131": 1, "c17": 1, "c18": 1, "c19": 1, "c9": 1 + 'c16': 1, 'c10': 1, 'c130': 1, 'c131': 1, 'c17': 1, 'c18': 1, 'c19': 1, 'c9': 1 } self.url = self.urls['base_url'] @@ -70,27 +72,28 @@ def login(self): if re.search('account-logout.php', check_login): return True else: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('account-login.php', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] + if not self.login(): return results for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {}'.format(search_string.decode('utf-8')), logger.DEBUG) self.search_params['search'] = search_string @@ -101,17 +104,17 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man try: with BS4Parser(data, 'html5lib') as html: - result_linkz = html('a', href=re.compile("torrents-details")) + result_linkz = html('a', href=re.compile('torrents-details')) if not result_linkz: - logger.log(u"Data returned from provider do not contains any torrent", logger.DEBUG) + logger.log('Data returned from provider do not contains any torrent', logger.DEBUG) continue if result_linkz: for link in result_linkz: title = link.text download_url = self.urls['base_url'] + link['href'] - download_url = download_url.replace("torrents-details", "download") + download_url = download_url.replace('torrents-details', 'download') # FIXME size = -1 seeders = 1 @@ -123,18 +126,29 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent # if seeders < min(self.minseed, 1): # if mode != 'RSS': - # logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + # logger.log('Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})'.format # (title, seeders), logger.DEBUG) # continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: %s " % title, logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py index 489f5b0705..c41fe5cd76 100644 --- a/sickbeard/providers/btdigg.py +++ b/sickbeard/providers/btdigg.py @@ -1,26 +1,25 @@ # coding=utf-8 # Author: Jodi Jones # Rewrite: Gonçalo M. (aka duramato/supergonkas) - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. 
# -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals -import validators +import traceback from sickbeard import logger, tvcache @@ -31,92 +30,105 @@ class BTDiggProvider(TorrentProvider): def __init__(self): - + # Provider Init - TorrentProvider.__init__(self, "BTDigg") + TorrentProvider.__init__(self, 'BTDigg') self.public = True - - # Torrent Stats + + # Torrent Stats self.minseed = None self.minleech = None # URLs - self.url = "https://btdigg.org" - self.urls = {"api": "https://api.btdigg.org/api/private-341ada3245790954/s02"} + self.url = 'https://btdigg.org' + self.urls = {'api': 'https://api.btdigg.org/api/private-341ada3245790954/s02'} self.custom_url = None - + # Proper Strings - self.proper_strings = ["PROPER", "REPACK"] + self.proper_strings = ['PROPER', 'REPACK'] # Use this hacky way for RSS search since most results will use this codecs - cache_params = {"RSS": ["x264", "x264.HDTV", "720.HDTV.x264"]} + cache_params = {'RSS': ['x264', 'x264.HDTV', '720.HDTV.x264']} # Only poll BTDigg every 30 minutes max, since BTDigg takes some time to crawl self.cache = tvcache.TVCache(self, min_time=30, search_params=cache_params) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] - search_params = {"p": 0} + search_params = {'p': 0} for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - search_params["q"] = search_string - if mode != "RSS": - search_params["order"] = 0 - logger.log("Search string: {}".format(search_string.decode("utf-8")), + search_params['q'] = search_string + if mode != 'RSS': + search_params['order'] = 0 + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) else: - search_params["order"] = 2 + search_params['order'] = 2 if self.custom_url: # if not validators.url(self.custom_url): - # logger.log("Invalid custom url set, please check your settings", logger.WARNING) + # logger.log('Invalid custom url set, please check your settings', logger.WARNING) # return results - search_url = self.custom_url + "api/private-341ada3245790954/s02" + search_url = self.custom_url + 'api/private-341ada3245790954/s02' else: - search_url = self.urls["api"] - jdata = self.get_url(search_url, params=search_params, returns="json") + search_url = self.urls['api'] + jdata = self.get_url(search_url, params=search_params, returns='json') if not jdata: - logger.log("Provider did not return data", logger.DEBUG) + logger.log('Provider did not return data', logger.DEBUG) continue for torrent in jdata: try: - title = torrent.pop("name", "") - download_url = torrent.pop("magnet") + self._custom_trackers if torrent["magnet"] else None + title = torrent.pop('name', '') + download_url = torrent.pop('magnet') + self._custom_trackers if torrent['magnet'] else None if not all([title, download_url]): continue - if float(torrent.pop("ff")): - logger.log("Ignoring result for {} since it's been reported as fake (level = {})".format - (title, torrent["ff"]), 
logger.DEBUG) + if float(torrent.pop('ff')): + logger.log("Ignoring result for {0} since it's been" + ' reported as fake (level = {1})'.format + (title, torrent['ff']), logger.DEBUG) continue - if not int(torrent.pop("files")): - logger.log("Ignoring result for {} because it has no files".format + if not int(torrent.pop('files')): + logger.log('Ignoring result for {0} because it has no files'.format (title), logger.DEBUG) continue - leechers = torrent.pop("leechers", 0) - seeders = torrent.pop("seeders", 1) + + leechers = torrent.pop('leechers', 0) + seeders = torrent.pop('seeders', 1) # Filter unseeded torrent if seeders < min(self.minseed, 1): - if mode != "RSS": + if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1})".format + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) - continue - torrent_size = torrent.pop("size") + continue + + torrent_size = torrent.pop('size') size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: %s " % title, logger.DEBUG) + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index e08f7beccf..f9e0c41ef4 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -1,22 +1,22 @@ # coding=utf-8 # Author: Daniel Heimans # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals from datetime import datetime import jsonrpclib @@ -38,7 +38,7 @@ class BTNProvider(TorrentProvider): def __init__(self): - TorrentProvider.__init__(self, "BTN") + TorrentProvider.__init__(self, 'BTN') self.supports_absolute_numbering = True @@ -46,26 +46,26 @@ def __init__(self): self.cache = BTNCache(self, min_time=15) # Only poll BTN every 15 minutes max - self.urls = {'base_url': u'http://api.btnapps.net', - 'website': u'http://broadcasthe.net/', } + self.urls = {'base_url': 'http://api.btnapps.net', + 'website': 'http://broadcasthe.net/', } self.url = self.urls['website'] def _check_auth(self): if not self.api_key: - logger.log(u"Invalid api key. 
Check your settings", logger.WARNING) + logger.log('Invalid api key. Check your settings', logger.WARNING) return True - def _checkAuthFromData(self, parsedJSON): + def _checkAuthFromData(self, parsed_json): - if parsedJSON is None: + if parsed_json is None: return self._check_auth() - if 'api-error' in parsedJSON: - logger.log(u"Incorrect authentication credentials: % s" % parsedJSON['api-error'], logger.DEBUG) + if 'api-error' in parsed_json: + logger.log('Incorrect authentication credentials: % s' % parsed_json['api-error'], logger.DEBUG) raise AuthException( - "Your authentication credentials for " + self.name + " are incorrect, check your config.") + 'Your authentication credentials for ' + self.name + ' are incorrect, check your config.') return True @@ -79,21 +79,21 @@ def search(self, search_params, age=0, ep_obj=None): # pylint:disable=too-many- # age in seconds if age: - params['age'] = "<=" + str(int(age)) + params['age'] = '<=' + str(int(age)) if search_params: params.update(search_params) - logger.log(u"Search string: %s" % search_params, logger.DEBUG) + logger.log('Search string: %s' % search_params, logger.DEBUG) - parsedJSON = self._api_call(apikey, params) - if not parsedJSON: - logger.log(u"No data returned from provider", logger.DEBUG) + parsed_json = self._api_call(apikey, params) + if not parsed_json: + logger.log('No data returned from provider', logger.DEBUG) return results - if self._checkAuthFromData(parsedJSON): + if self._checkAuthFromData(parsed_json): - if 'torrents' in parsedJSON: - found_torrents = parsedJSON['torrents'] + if 'torrents' in parsed_json: + found_torrents = parsed_json['torrents'] else: found_torrents = {} @@ -104,24 +104,24 @@ def search(self, search_params, age=0, ep_obj=None): # pylint:disable=too-many- max_pages = 150 results_per_page = 1000 - if 'results' in parsedJSON and int(parsedJSON['results']) >= results_per_page: - pages_needed = int(math.ceil(int(parsedJSON['results']) / results_per_page)) + if 'results' in parsed_json and int(parsed_json['results']) >= results_per_page: + pages_needed = int(math.ceil(int(parsed_json['results']) / results_per_page)) if pages_needed > max_pages: pages_needed = max_pages # +1 because range(1,4) = 1, 2, 3 for page in range(1, pages_needed + 1): - parsedJSON = self._api_call(apikey, params, results_per_page, page * results_per_page) + parsed_json = self._api_call(apikey, params, results_per_page, page * results_per_page) # Note that this these are individual requests and might time out individually. This would result in 'gaps' # in the results. There is no way to fix this though. 
- if 'torrents' in parsedJSON: - found_torrents.update(parsedJSON['torrents']) + if 'torrents' in parsed_json: + found_torrents.update(parsed_json['torrents']) for _, torrent_info in found_torrents.iteritems(): (title, url) = self._get_title_and_url(torrent_info) if title and url: - logger.log(u"Found result: %s " % title, logger.DEBUG) + logger.log('Found result: %s ' % title, logger.DEBUG) results.append(torrent_info) # FIXME SORT RESULTS @@ -130,66 +130,67 @@ def search(self, search_params, age=0, ep_obj=None): # pylint:disable=too-many- def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): server = jsonrpclib.Server(self.urls['base_url']) - parsedJSON = {} + parsed_json = {} try: - parsedJSON = server.getTorrents(apikey, params or {}, int(results_per_page), int(offset)) + parsed_json = server.getTorrents(apikey, params or {}, int(results_per_page), int(offset)) time.sleep(cpu_presets[sickbeard.CPU_PRESET]) except jsonrpclib.jsonrpc.ProtocolError, error: if error.message == 'Call Limit Exceeded': - logger.log(u"You have exceeded the limit of 150 calls per hour, per API key which is unique to your user account", logger.WARNING) + logger.log('You have exceeded the limit of 150 calls per hour,' + ' per API key which is unique to your user account', logger.WARNING) else: - logger.log(u"JSON-RPC protocol error while accessing provicer. Error: %s " % repr(error), logger.ERROR) - parsedJSON = {'api-error': ex(error)} - return parsedJSON + logger.log('JSON-RPC protocol error while accessing provicer. Error: %s ' % repr(error), logger.ERROR) + parsed_json = {'api-error': ex(error)} + return parsed_json except socket.timeout: - logger.log(u"Timeout while accessing provider", logger.WARNING) + logger.log('Timeout while accessing provider', logger.WARNING) except socket.error, error: # Note that sometimes timeouts are thrown as socket errors - logger.log(u"Socket error while accessing provider. Error: %s " % error[1], logger.WARNING) + logger.log('Socket error while accessing provider. Error: %s ' % error[1], logger.WARNING) except Exception, error: errorstring = str(error) if errorstring.startswith('<') and errorstring.endswith('>'): errorstring = errorstring[1:-1] - logger.log(u"Unknown error while accessing provider. Error: %s " % errorstring, logger.WARNING) + logger.log('Unknown error while accessing provider. Error: %s ' % errorstring, logger.WARNING) - return parsedJSON + return parsed_json - def _get_title_and_url(self, parsedJSON): + def _get_title_and_url(self, parsed_json): # The BTN API gives a lot of information in response, # however SickRage is built mostly around Scene or # release names, which is why we are using them here. - if 'ReleaseName' in parsedJSON and parsedJSON['ReleaseName']: - title = parsedJSON['ReleaseName'] + if 'ReleaseName' in parsed_json and parsed_json['ReleaseName']: + title = parsed_json['ReleaseName'] else: # If we don't have a release name we need to get creative - title = u'' - if 'Series' in parsedJSON: - title += parsedJSON['Series'] - if 'GroupName' in parsedJSON: - title += '.' + parsedJSON['GroupName'] if title else parsedJSON['GroupName'] - if 'Resolution' in parsedJSON: - title += '.' + parsedJSON['Resolution'] if title else parsedJSON['Resolution'] - if 'Source' in parsedJSON: - title += '.' + parsedJSON['Source'] if title else parsedJSON['Source'] - if 'Codec' in parsedJSON: - title += '.' 
+ parsedJSON['Codec'] if title else parsedJSON['Codec'] + title = '' + if 'Series' in parsed_json: + title += parsed_json['Series'] + if 'GroupName' in parsed_json: + title += '.' + parsed_json['GroupName'] if title else parsed_json['GroupName'] + if 'Resolution' in parsed_json: + title += '.' + parsed_json['Resolution'] if title else parsed_json['Resolution'] + if 'Source' in parsed_json: + title += '.' + parsed_json['Source'] if title else parsed_json['Source'] + if 'Codec' in parsed_json: + title += '.' + parsed_json['Codec'] if title else parsed_json['Codec'] if title: title = title.replace(' ', '.') url = None - if 'DownloadURL' in parsedJSON: - url = parsedJSON['DownloadURL'] + if 'DownloadURL' in parsed_json: + url = parsed_json['DownloadURL'] if url: # unescaped / is valid in JSON, but it can be escaped - url = url.replace("\\/", "/") + url = url.replace('\\/', '/') return title, url @@ -202,7 +203,7 @@ def _get_season_search_strings(self, ep_obj): # Search for the year of the air by date show current_params['name'] = str(ep_obj.airdate).split('-')[0] elif ep_obj.show.is_anime: - current_params['name'] = "%d" % ep_obj.scene_absolute_number + current_params['name'] = '%d' % ep_obj.scene_absolute_number else: current_params['name'] = 'Season ' + str(ep_obj.scene_season) @@ -236,10 +237,10 @@ def _get_episode_search_strings(self, ep_obj, add_string=''): # combined with the series identifier should result in just one episode search_params['name'] = date_str.replace('-', '.') elif ep_obj.show.anime: - search_params['name'] = "%i" % int(ep_obj.scene_absolute_number) + search_params['name'] = '%i' % int(ep_obj.scene_absolute_number) else: # Do a general name search for the episode, formatted like SXXEYY - search_params['name'] = u"{ep}".format(ep=episode_num(ep_obj.scene_season, ep_obj.scene_episode)) + search_params['name'] = '{ep}'.format(ep=episode_num(ep_obj.scene_season, ep_obj.scene_episode)) # search if ep_obj.show.indexer == 1: @@ -291,14 +292,15 @@ def _getRSSData(self): if seconds_since_last_update < seconds_minTime: seconds_since_last_update = seconds_minTime - # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search, older things will need to be done through backlog + # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of 'RSS' data search, older things will need to be done through backlog if seconds_since_last_update > 86400: logger.log( - u"The last known successful update was more than 24 hours ago, only trying to fetch the last 24 hours!", + 'The last known successful update was more than 24 hours ago, only trying to fetch the last 24 hours!', logger.DEBUG) seconds_since_last_update = 86400 self.search_params = None # BTN cache does not use search params return {'entries': self.provider.search(search_params=self.search_params, age=seconds_since_last_update)} + provider = BTNProvider() diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py index 7802a4c420..d72a4ff411 100644 --- a/sickbeard/providers/cpasbien.py +++ b/sickbeard/providers/cpasbien.py @@ -1,24 +1,25 @@ # coding=utf-8 # Author: Guillaume Serre # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. 
# -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re +import traceback from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -31,25 +32,26 @@ class CpasbienProvider(TorrentProvider): def __init__(self): - TorrentProvider.__init__(self, "Cpasbien") + TorrentProvider.__init__(self, 'Cpasbien') self.public = True self.minseed = None self.minleech = None - self.url = "http://www.cpasbien.cm" + self.url = 'http://www.cpasbien.cm' self.proper_strings = ['PROPER', 'REPACK'] self.cache = tvcache.TVCache(self) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] + for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d' else: @@ -63,32 +65,44 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_rows = html(class_=re.compile('ligne[01]')) for result in torrent_rows: try: - title = result.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien") + title = result.find(class_='titre').get_text(strip=True).replace('HDTV', 'HDTV x264-CPasBien') title = re.sub(r' Saison', ' Season', title, flags=re.IGNORECASE) - tmp = result.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip() + tmp = result.find('a')['href'].split('/')[-1].replace('.html', '.torrent').strip() download_url = (self.url + '/telechargement/%s' % tmp) if not all([title, download_url]): continue - seeders = try_int(result.find(class_="up").get_text(strip=True)) - leechers = try_int(result.find(class_="down").get_text(strip=True)) + seeders = try_int(result.find(class_='up').get_text(strip=True)) + leechers = try_int(result.find(class_='down').get_text(strip=True)) if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - torrent_size = result.find(class_="poid").get_text(strip=True) + torrent_size = result.find(class_='poid').get_text(strip=True) units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po'] size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items From 9598985b134dda05b05da9f3601c52462c676104 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 4 Jun 2016 20:29:01 +0200 Subject: [PATCH 024/134] Small anizb update --- sickbeard/providers/anizb.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py index 5a4ec03068..5df071bdd8 100644 --- a/sickbeard/providers/anizb.py +++ b/sickbeard/providers/anizb.py @@ -75,17 +75,17 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, returns='text') if not data: - logger.log(u'No data returned from provider', logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if not data.startswith(' Date: Sat, 4 Jun 2016 20:32:49 +0200 Subject: [PATCH 025/134] Small bluetigers update --- sickbeard/providers/bluetigers.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py index eb4c565f43..93af2e4a18 100644 --- a/sickbeard/providers/bluetigers.py +++ b/sickbeard/providers/bluetigers.py @@ -26,7 +26,6 @@ from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser -from sickrage.helper.common import try_int from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -89,11 +88,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) self.search_params['search'] = search_string From d74dae12159506901bfb92411a80c9375d8e49a9 Mon Sep 17 00:00:00 2001 From: medariox Date: Sun, 5 Jun 2016 13:30:45 +0200 Subject: [PATCH 026/134] Next 10 providers --- sickbeard/providers/danishbits.py | 52 +++++++++------ sickbeard/providers/elitetorrent.py | 84 ++++++++++++----------- sickbeard/providers/extratorrent.py | 76 ++++++++++++--------- sickbeard/providers/freshontv.py | 65 ++++++++++-------- sickbeard/providers/gftracker.py | 48 ++++++++----- sickbeard/providers/hd4free.py | 62 ++++++++++------- sickbeard/providers/hdbits.py | 57 ++++++++-------- sickbeard/providers/hdspace.py | 66 
++++++++++-------- sickbeard/providers/hdtorrents.py | 88 ++++++++++++++---------- sickbeard/providers/hounddawgs.py | 100 ++++++++++++++++------------ 10 files changed, 406 insertions(+), 292 deletions(-) diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py index 56b414e540..e4d6415f2a 100644 --- a/sickbeard/providers/danishbits.py +++ b/sickbeard/providers/danishbits.py @@ -1,22 +1,24 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals + +import traceback from requests.utils import dict_from_cookiejar @@ -32,7 +34,7 @@ class DanishbitsProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): # Provider Init - TorrentProvider.__init__(self, "Danishbits") + TorrentProvider.__init__(self, 'Danishbits') # Credentials self.username = None @@ -69,12 +71,12 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) self.session.cookies.clear() return False if 'Login :: Danishbits.org' in response: - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) self.session.cookies.clear() return False @@ -105,28 +107,28 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) search_params['search'] = search_string data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', id='torrent_table') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # Literal: Navn, Størrelse, Kommentarer, Tilføjet, Snatches, Seeders, Leechers @@ -138,7 +140,7 @@ def process_column_header(td): try: title = result.find(class_='croptorrenttext').get_text(strip=True) - download_url = self.url + result.find(title="Direkte download link")['href'] + download_url = self.url + result.find(title='Direkte download link')['href'] if not all([title, download_url]): continue @@ -150,8 +152,8 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the" - u" minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue @@ -162,13 +164,23 @@ def process_column_header(td): torrent_size = cells[labels.index('Størrelse')].contents[0] size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py index c0720c8295..4fc3b1a085 100644 --- a/sickbeard/providers/elitetorrent.py +++ b/sickbeard/providers/elitetorrent.py @@ -1,22 +1,22 @@ # coding=utf-8 # Author: CristianBB # - -# -# This file is part of SickRage. +# This file is part of Medusa. 
# -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re import traceback @@ -32,7 +32,7 @@ class elitetorrentProvider(TorrentProvider): def __init__(self): - TorrentProvider.__init__(self, "EliteTorrent") + TorrentProvider.__init__(self, 'EliteTorrent') self.onlyspasearch = None self.minseed = None @@ -50,7 +50,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man results = [] lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang - """ + ''' Search query: http://www.elitetorrent.net/torrents.php?cat=4&modo=listado&orden=fecha&pag=1&buscar=fringe @@ -59,7 +59,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man orden = fecha => order buscar => Search show pag = 1 => page number - """ + ''' search_params = { 'cat': 4, @@ -67,21 +67,20 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'orden': 'fecha', 'pag': 1, 'buscar': '' - } for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) # Only search if user conditions are true if self.onlyspasearch and lang_info != 'es' and mode != 'RSS': - logger.log(u"Show info is not spanish, skipping provider search", logger.DEBUG) + logger.log('Show info is not spanish, skipping provider search', logger.DEBUG) continue for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) search_string = re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', search_string) @@ -91,27 +90,24 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not data: continue - try: - with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find('table', class_='fichas-listado') - torrent_rows = torrent_table('tr') if torrent_table else [] - - if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) - continue + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', class_='fichas-listado') + torrent_rows = torrent_table('tr') if torrent_table else [] - for row in torrent_rows[1:]: - try: - download_url = self.urls['base_url'] + row.find('a')['href'] - title = self._processTitle(row.find('a', class_='nombre')['title']) - seeders = try_int(row.find('td', class_='semillas').get_text(strip=True)) - leechers = try_int(row.find('td', class_='clientes').get_text(strip=True)) + # Continue only if at least one release is found + if len(torrent_rows) < 2: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - # Provider does not 
provide size - size = -1 + for row in torrent_rows[1:]: + try: + download_url = self.urls['base_url'] + row.find('a')['href'] + title = self._process_title(row.find('a', class_='nombre')['title']) + seeders = try_int(row.find('td', class_='semillas').get_text(strip=True)) + leechers = try_int(row.find('td', class_='clientes').get_text(strip=True)) - except (AttributeError, TypeError, KeyError, ValueError): - continue + # Provider does not provide size + size = -1 if not all([title, download_url]): continue @@ -119,25 +115,36 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - - except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.WARNING) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items return results @staticmethod - def _processTitle(title): + def _process_title(title): # Quality, if no literal is defined it's HDTV if 'calidad' not in title: @@ -155,4 +162,5 @@ def _processTitle(title): return title.strip() + provider = elitetorrentProvider() diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index 4764ffbb2b..1cb1f4a0ce 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -2,26 +2,27 @@ # Author: Gonçalo M. (aka duramato/supergonkas) # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
-import re +from __future__ import unicode_literals +import re +import traceback import sickbeard + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser from sickbeard.common import USER_AGENT @@ -34,7 +35,7 @@ class ExtraTorrentProvider(TorrentProvider): # pylint: disable=too-many-instanc def __init__(self): - TorrentProvider.__init__(self, "ExtraTorrent") + TorrentProvider.__init__(self, 'ExtraTorrent') self.urls = { 'index': 'http://extratorrent.cc', @@ -56,10 +57,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man results = [] for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) self.search_params.update({'type': ('search', 'rss')[mode == 'RSS'], 'search': search_string}) @@ -67,15 +68,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, params=self.search_params, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if not data.startswith('$', '', item.find('title').get_text(strip=True)) seeders = try_int(item.find('seeders').get_text(strip=True)) @@ -89,26 +90,37 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man download_url = re.sub(r'(.*)/torrent/(.*).html', r'\1/download/\2.torrent', download_url) else: info_hash = item.find('info_hash').get_text(strip=True) - download_url = "magnet:?xt=urn:btih:" + info_hash + "&dn=" + title + self._custom_trackers - - except (AttributeError, TypeError, KeyError, ValueError): - continue - - if not all([title, download_url]): - continue - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): + download_url = 'magnet:?xt=urn:btih:' + info_hash + '&dn=' + title + self._custom_trackers + + if not all([title, download_url]): + continue + + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format + (title, seeders), logger.DEBUG) + continue + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) - items.append(item) + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 95ce723fdc..59b7c10ed1 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -1,28 +1,29 @@ # coding=utf-8 # Author: Idan Gutman # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re -from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar import time import traceback +from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -34,7 +35,7 @@ class FreshOnTVProvider(TorrentProvider): # pylint: disable=too-many-instance-a def __init__(self): - TorrentProvider.__init__(self, "FreshOnTV") + TorrentProvider.__init__(self, 'FreshOnTV') self._uid = None self._hash = None @@ -59,7 +60,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return True @@ -76,7 +77,7 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('/logout.php', response): @@ -90,15 +91,15 @@ def login(self): 'pass': self._hash} return True except Exception: - logger.log(u"Unable to login to provider (cookie)", logger.WARNING) + logger.log('Unable to login to provider (cookie)', logger.WARNING) return False else: if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) if re.search('DDoS protection by CloudFlare', response): - logger.log(u"Unable to login to provider due to CloudFlare DDoS javascript check", logger.WARNING) + logger.log('Unable to login to provider due to CloudFlare DDoS javascript check', logger.WARNING) return False @@ -111,11 +112,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) search_url = self.urls['search'] % (freeleech, search_string) @@ -125,7 +126,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many max_page_number = 0 if not init_html: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue try: @@ -153,7 +154,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if max_page_number > 3 and mode == 'RSS': max_page_number = 3 except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + logger.log('Failed parsing provider. Traceback: %s' % traceback.format_exc(), logger.ERROR) continue data_response_list = [init_html] @@ -164,7 +165,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many time.sleep(1) page_search_url = search_url + '&page=' + str(i) - # '.log(u"Search string: " + page_search_url, logger.DEBUG) + # '.log('Search string: ' + page_search_url, logger.DEBUG) page_html = self.get_url(page_search_url, returns='text') if not page_html: @@ -178,11 +179,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many with BS4Parser(data_response, 'html5lib') as html: - torrent_rows = html("tr", {"class": re.compile('torrent_[0-9]*')}) + torrent_rows = html('tr', {'class': re.compile('torrent_[0-9]*')}) # Continue only if a Release is found if not torrent_rows: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue for individual_torrent in torrent_rows: @@ -194,7 +195,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many try: title = individual_torrent.find('a', {'class': 'torrent_name_link'})['title'] except Exception: - logger.log(u"Unable to parse torrent title. Traceback: %s " % traceback.format_exc(), logger.WARNING) + logger.log('Unable to parse torrent title. Traceback: %s ' % traceback.format_exc(), logger.WARNING) continue try: @@ -214,18 +215,30 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index 85c78b4eba..28e4943a16 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -1,24 +1,26 @@ # coding=utf-8 # Author: medariox # based on Dustyn Gibson's work - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re +import traceback from requests.utils import dict_from_cookiejar @@ -35,7 +37,7 @@ class GFTrackerProvider(TorrentProvider): # pylint: disable=too-many-instance-a def __init__(self): # Provider Init - TorrentProvider.__init__(self, "GFTracker") + TorrentProvider.__init__(self, 'GFTracker') # Credentials self.username = None @@ -61,7 +63,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") + raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') return True @@ -78,11 +80,11 @@ def login(self): self.get_url(self.url, returns='text') response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Username or password incorrect', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -118,19 +120,19 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) search_params['search'] = search_string data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -139,7 +141,7 @@ def process_column_header(td): # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue labels = [process_column_header(label) for label in torrent_rows[0]('td')] @@ -162,21 +164,31 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the" - u" minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue torrent_size = cells[labels.index('Size/Snatched')].get_text(strip=True).split('/', 1)[0] size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index 27d94860ea..3cfa9e65c1 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -1,22 +1,24 @@ # coding=utf-8 # Author: Gonçalo M. (aka duramato/supergonkas) # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . 
+# along with Medusa. If not, see . + +from __future__ import unicode_literals + +import traceback from requests.compat import urljoin from sickbeard import logger, tvcache @@ -29,7 +31,7 @@ class HD4FreeProvider(TorrentProvider): # pylint: disable=too-many-instance-att def __init__(self): - TorrentProvider.__init__(self, "HD4Free") + TorrentProvider.__init__(self, 'HD4Free') self.url = 'https://hd4free.xyz' self.urls = {'search': urljoin(self.url, '/searchapi.php')} @@ -62,7 +64,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if self.freeleech: search_params['fl'] = 'true' @@ -70,56 +72,68 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params.pop('fl', '') if mode != 'RSS': - logger.log(u"Search string: {0}".format(search_string), logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['search'] = search_string else: search_params.pop('search', '') try: jdata = self.get_url(self.urls['search'], params=search_params, returns='json') except ValueError: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue - + if not jdata: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue - + error = jdata.get('error') if error: - logger.log(u"{}".format(error), logger.DEBUG) + logger.log('{0}'.format(error), logger.DEBUG) return results try: if jdata['0']['total_results'] == 0: - logger.log(u"Provider has no results for this search", logger.DEBUG) + logger.log('Provider has no results for this search', logger.DEBUG) continue except StandardError: continue for i in jdata: try: - title = jdata[i]["release_name"] - download_url = jdata[i]["download_url"] + title = jdata[i]['release_name'] + download_url = jdata[i]['download_url'] if not all([title, download_url]): continue - seeders = jdata[i]["seeders"] - leechers = jdata[i]["leechers"] + seeders = jdata[i]['seeders'] + leechers = jdata[i]['leechers'] if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format (title, seeders), logger.DEBUG) continue - torrent_size = str(jdata[i]["size"]) + ' MB' + torrent_size = str(jdata[i]['size']) + ' MB' size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index f14d5ad5e3..fa22dfcdb9 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -1,21 +1,21 @@ # coding=utf-8 # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import datetime import json @@ -32,7 +32,7 @@ class HDBitsProvider(TorrentProvider): def __init__(self): - TorrentProvider.__init__(self, "HDBits") + TorrentProvider.__init__(self, 'HDBits') self.username = None self.passkey = None @@ -49,24 +49,24 @@ def __init__(self): def _check_auth(self): if not self.username or not self.passkey: - raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") + raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') return True - def _checkAuthFromData(self, parsedJSON): + def _check_auth_from_data(self, parsed_json): - if 'status' in parsedJSON and 'message' in parsedJSON: - if parsedJSON.get('status') == 5: - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + if 'status' in parsed_json and 'message' in parsed_json: + if parsed_json.get('status') == 5: + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return True def _get_season_search_strings(self, ep_obj): - season_search_string = [self._make_post_data_JSON(show=ep_obj.show, season=ep_obj)] + season_search_string = [self._make_post_data_json(show=ep_obj.show, season=ep_obj)] return season_search_string def _get_episode_search_strings(self, ep_obj, add_string=''): - episode_search_string = [self._make_post_data_JSON(show=ep_obj.show, episode=ep_obj)] + episode_search_string = [self._make_post_data_json(show=ep_obj.show, episode=ep_obj)] return episode_search_string def _get_title_and_url(self, item): @@ -80,19 +80,19 @@ def search(self, search_params, age=0, ep_obj=None): # FIXME results = [] - logger.log(u"Search string: %s" % search_params, logger.DEBUG) + logger.log('Search string: {0}'.format(search_params), logger.DEBUG) self._check_auth() - parsedJSON = self.get_url(self.urls['search'], post_data=search_params, returns='json') - if not parsedJSON: + parsed_json = self.get_url(self.urls['search'], post_data=search_params, returns='json') + if not parsed_json: return [] - if self._checkAuthFromData(parsedJSON): - if parsedJSON and 'data' in parsedJSON: - items = parsedJSON['data'] + if self._check_auth_from_data(parsed_json): + if parsed_json and 'data' in parsed_json: + items = parsed_json['data'] else: - logger.log(u"Resulting JSON from provider isn't correct, not parsing it", logger.ERROR) + logger.log("Resulting JSON from provider isn't correct, not parsing it", logger.ERROR) items = [] for item in items: @@ -106,7 +106,7 @@ def find_propers(self, search_date=None): search_terms = [' proper ', ' repack '] for term in search_terms: - for item in self.search(self._make_post_data_JSON(search_term=term)): + for item in self.search(self._make_post_data_json(search_term=term)): if item['utadded']: try: result_date = datetime.datetime.fromtimestamp(int(item['utadded'])) @@ -120,7 +120,7 @@ def find_propers(self, search_date=None): return results - def _make_post_data_JSON(self, show=None, episode=None, season=None, search_term=None): + def _make_post_data_json(self, show=None, episode=None, season=None, search_term=None): post_data = { 'username': self.username, @@ -143,7 +143,7 @@ def _make_post_data_JSON(self, show=None, episode=None, season=None, search_term elif show.anime: post_data['tvdb'] = { 'id': show.indexerid, - 'episode': "%i" % int(episode.scene_absolute_number) + 'episode': '%i' % int(episode.scene_absolute_number) } else: post_data['tvdb'] = { @@ -161,7 +161,7 @@ def _make_post_data_JSON(self, show=None, episode=None, season=None, search_term elif show.anime: post_data['tvdb'] = { 'id': show.indexerid, - 'season': "%d" % season.scene_absolute_number, + 'season': '%d' % season.scene_absolute_number, } else: post_data['tvdb'] = { @@ -181,13 +181,14 @@ def _getRSSData(self): results = [] try: - parsedJSON = self.provider.getURL(self.provider.urls['rss'], post_data=self.provider._make_post_data_JSON(), returns='json') + parsed_json = self.provider.getURL(self.provider.urls['rss'], post_data=self.provider._make_post_data_json(), returns='json') - if self.provider._checkAuthFromData(parsedJSON): - results = parsedJSON['data'] + if self.provider._check_auth_from_data(parsed_json): + results = parsed_json['data'] except Exception: pass return {'entries': results} + provider = HDBitsProvider() diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index 24f26183a0..9730291e00 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -2,30 +2,32 
@@ # Author: Idan Gutman # Modified by jkaberg, https://github.com/jkaberg for SceneAccess # Modified by 7ca for HDSpace - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re -from requests.utils import dict_from_cookiejar -from bs4 import BeautifulSoup +import traceback +from requests.utils import dict_from_cookiejar from requests.compat import quote_plus from sickbeard import logger, tvcache +from sickbeard.bs4_parser import BS4Parser from sickrage.helper.common import convert_size, try_int from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -35,7 +37,7 @@ class HDSpaceProvider(TorrentProvider): # pylint: disable=too-many-instance-att def __init__(self): - TorrentProvider.__init__(self, "HDSpace") + TorrentProvider.__init__(self, 'HDSpace') self.username = None self.password = None @@ -44,10 +46,10 @@ def __init__(self): self.cache = tvcache.TVCache(self, min_time=10) # only poll HDSpace every 10 minutes max - self.urls = {'base_url': u'https://hd-space.org/', - 'login': u'https://hd-space.org/index.php?page=login', - 'search': u'https://hd-space.org/index.php?page=torrents&search=%s&active=1&options=0', - 'rss': u'https://hd-space.org/rss_torrents.php?feed=dl'} + self.urls = {'base_url': 'https://hd-space.org/', + 'login': 'https://hd-space.org/index.php?page=login', + 'search': 'https://hd-space.org/index.php?page=torrents&search=%s&active=1&options=0', + 'rss': 'https://hd-space.org/rss_torrents.php?feed=dl'} self.categories = [15, 21, 22, 24, 25, 40] # HDTV/DOC 1080/720, bluray, remux self.urls['search'] += '&category=' @@ -61,7 +63,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return True @@ -77,11 +79,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Password Incorrect', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -93,7 +95,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': search_url = self.urls['search'] % (quote_plus(search_string.replace('.', ' ')),) @@ -101,12 +104,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_url = self.urls['search'] % '' if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) data = self.get_url(search_url, returns='text') if not data or 'please try later' in data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue # Search result page contains some invalid html that prevents html parser from returning all data. @@ -116,12 +119,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = data.split('
')[1] index = data.index(' # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re +import traceback + from requests.compat import quote_plus from requests.utils import dict_from_cookiejar @@ -33,7 +35,7 @@ class HDTorrentsProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): - TorrentProvider.__init__(self, "HDTorrents") + TorrentProvider.__init__(self, 'HDTorrents') self.username = None self.password = None @@ -49,7 +51,7 @@ def __init__(self): self.url = self.urls['base_url'] - self.categories = "&category[]=59&category[]=60&category[]=30&category[]=38" + self.categories = '&category[]=59&category[]=60&category[]=30&category[]=38' self.proper_strings = ['PROPER', 'REPACK'] self.cache = tvcache.TVCache(self, min_time=30) # only poll HDTorrents every 30 minutes max @@ -57,7 +59,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return True @@ -71,11 +73,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('You need cookies enabled to log in.', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -87,12 +89,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': search_url = self.urls['search'] % (quote_plus(search_string), self.categories) - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) else: search_url = self.urls['rss'] % self.categories @@ -102,11 +104,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, returns='text') if not data or 'please try later' in data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if data.find('No torrents here') != -1: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # Search result page contains some invalid html that prevents html parser from returning all data. @@ -115,14 +117,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man try: index = data.lower().index('. +# along with Medusa. If not, see . + +from __future__ import unicode_literals import re import traceback + from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -31,7 +34,7 @@ class HoundDawgsProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): - TorrentProvider.__init__(self, "HoundDawgs") + TorrentProvider.__init__(self, 'HoundDawgs') self.username = None self.password = None @@ -49,19 +52,19 @@ def __init__(self): self.url = self.urls['base_url'] self.search_params = { - "filter_cat[85]": 1, - "filter_cat[58]": 1, - "filter_cat[57]": 1, - "filter_cat[74]": 1, - "filter_cat[92]": 1, - "filter_cat[93]": 1, - "order_by": "s3", - "order_way": "desc", - "type": '', - "userid": '', - "searchstr": '', - "searchimdb": '', - "searchtags": '' + 'filter_cat[85]': 1, + 'filter_cat[58]': 1, + 'filter_cat[57]': 1, + 'filter_cat[74]': 1, + 'filter_cat[92]': 1, + 'filter_cat[93]': 1, + 'order_by': 's3', + 'order_way': 'desc', + 'type': '', + 'userid': '', + 'searchstr': '', + 'searchimdb': '', + 'searchtags': '' } self.cache = tvcache.TVCache(self) @@ -80,13 +83,13 @@ def login(self): self.get_url(self.urls['base_url'], returns='text') response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Dit brugernavn eller kodeord er forkert.', response) \ or re.search('Login :: HoundDawgs', response) \ or re.search('Dine cookies er ikke aktiveret.', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -98,32 +101,32 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string.decode('utf-8')), logger.DEBUG) self.search_params['searchstr'] = search_string data = self.get_url(self.urls['search'], params=self.search_params, returns='text') if not data: - logger.log(u'URL did not return data', logger.DEBUG) + logger.log('URL did not return data', logger.DEBUG) continue - strTableStart = "
Date: Thu, 9 Jun 2016 19:27:41 +0200 Subject: [PATCH 027/134] Added unicode_literals to GenericProvider (#677) * Added unicode_literals to GenericProvider * Also adapted all providers, to make use of the future import unicode_literals * Removed the decode()/encode() * Cleaned up some double to single quotes * Added proper exceptions for the provider results items * Some logging cleanup using format() * Now Really remove the .decodes() * Also removed the encodes. * Fixed after a search/replace * Fixed docstrings --- sickbeard/providers/abnormal.py | 2 +- sickbeard/providers/alpharatio.py | 2 +- sickbeard/providers/anizb.py | 10 +- sickbeard/providers/bithdtv.py | 4 +- sickbeard/providers/bitsnoop.py | 3 +- sickbeard/providers/bluetigers.py | 3 +- sickbeard/providers/btdigg.py | 3 +- sickbeard/providers/cpasbien.py | 2 +- sickbeard/providers/danishbits.py | 8 +- sickbeard/providers/elitetorrent.py | 6 +- sickbeard/providers/extratorrent.py | 3 +- sickbeard/providers/freshontv.py | 2 +- sickbeard/providers/gftracker.py | 2 +- sickbeard/providers/hdspace.py | 2 +- sickbeard/providers/hdtorrents.py | 2 +- sickbeard/providers/hounddawgs.py | 2 +- sickbeard/providers/ilovetorrents.py | 52 ++++----- sickbeard/providers/iptorrents.py | 65 ++++++----- sickbeard/providers/kat.py | 78 +++++++------ sickbeard/providers/morethantv.py | 33 +++--- sickbeard/providers/newpct.py | 22 ++-- sickbeard/providers/newznab.py | 18 ++- sickbeard/providers/norbits.py | 62 +++++----- sickbeard/providers/nyaatorrents.py | 17 +-- sickbeard/providers/omgwtfnzbs.py | 23 ++-- sickbeard/providers/pretome.py | 66 ++++++----- sickbeard/providers/rarbg.py | 104 ++++++++--------- sickbeard/providers/rsstorrent.py | 12 +- sickbeard/providers/scc.py | 27 +++-- sickbeard/providers/sceneelite.py | 93 +++++++-------- sickbeard/providers/scenetime.py | 53 +++++---- sickbeard/providers/speedcd.py | 26 +++-- sickbeard/providers/t411.py | 33 +++--- sickbeard/providers/thepiratebay.py | 89 +++++++------- sickbeard/providers/tntvillage.py | 159 +++++++++++++------------- sickbeard/providers/tokyotoshokan.py | 45 ++++---- sickbeard/providers/torrentbytes.py | 79 +++++++------ sickbeard/providers/torrentday.py | 62 +++++----- sickbeard/providers/torrentleech.py | 85 +++++++------- sickbeard/providers/torrentz.py | 26 +++-- sickbeard/providers/transmitthenet.py | 108 +++++++++-------- sickbeard/providers/tvchaosuk.py | 21 ++-- sickbeard/providers/womble.py | 2 +- sickbeard/providers/xthor.py | 35 +++--- sickbeard/providers/zooqle.py | 5 +- sickrage/providers/GenericProvider.py | 47 ++++---- 46 files changed, 854 insertions(+), 749 deletions(-) diff --git a/sickbeard/providers/abnormal.py b/sickbeard/providers/abnormal.py index d550cbad92..35d7c55229 100644 --- a/sickbeard/providers/abnormal.py +++ b/sickbeard/providers/abnormal.py @@ -102,7 +102,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index 5306d4a247..e75cd9530a 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -115,7 +115,7 @@ def process_column_header(td): for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: 
{search}'.format - (search=search_string.decode('utf-8')), logger.DEBUG) + (search=search_string), logger.DEBUG) search_params['searchstr'] = search_string search_url = self.urls['search'] diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py index 5df071bdd8..2ee9d7403f 100644 --- a/sickbeard/providers/anizb.py +++ b/sickbeard/providers/anizb.py @@ -31,7 +31,7 @@ class Anizb(NZBProvider): # pylint: disable=too-many-instance-attributes - '''Nzb Provider using the open api of anizb.org for daily (rss) and backlog/forced searches''' + """Nzb Provider using the open api of anizb.org for daily (rss) and backlog/forced searches""" def __init__(self): # Provider Init @@ -52,11 +52,11 @@ def __init__(self): self.cache = tvcache.TVCache(self) def _get_size(self, item): - '''Override the default _get_size to prevent it from extracting using it the default tags''' + """Override the default _get_size to prevent it from extracting using it the default tags""" return try_int(item.get('size')) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals - '''Start searching for anime using the provided search_strings. Used for backlog and daily''' + """Start searching for anime using the provided search_strings. Used for backlog and daily""" results = [] if self.show and not self.show.is_anime: @@ -67,8 +67,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), - logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) try: search_url = (self.urls['rss'], self.urls['api'] + search_string)[mode != 'RSS'] @@ -110,5 +109,4 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results - provider = Anizb() diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py index 267a71113e..7b90e13c4b 100644 --- a/sickbeard/providers/bithdtv.py +++ b/sickbeard/providers/bithdtv.py @@ -162,8 +162,8 @@ def login(self): return True login_params = { - 'username': self.username.encode('utf-8'), - 'password': self.password.encode('utf-8'), + 'username': self.username, + 'password': self.password, } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py index 4522a8d437..c6af13b84f 100644 --- a/sickbeard/providers/bitsnoop.py +++ b/sickbeard/providers/bitsnoop.py @@ -60,8 +60,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), - logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) try: search_url = (self.urls['rss'], self.urls['search'] + search_string + '/s/d/1/?fmt=rss')[mode != 'RSS'] diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py index 93af2e4a18..2e6ce79c4a 100644 --- a/sickbeard/providers/bluetigers.py +++ b/sickbeard/providers/bluetigers.py @@ -92,8 +92,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), - logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) self.search_params['search'] = 
search_string diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py index c41fe5cd76..4b6a0abe92 100644 --- a/sickbeard/providers/btdigg.py +++ b/sickbeard/providers/btdigg.py @@ -64,8 +64,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params['q'] = search_string if mode != 'RSS': search_params['order'] = 0 - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), - logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) else: search_params['order'] = 2 if self.custom_url: diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py index d72a4ff411..039d2d141c 100644 --- a/sickbeard/providers/cpasbien.py +++ b/sickbeard/providers/cpasbien.py @@ -51,7 +51,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d' else: diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py index e4d6415f2a..948e82e077 100644 --- a/sickbeard/providers/danishbits.py +++ b/sickbeard/providers/danishbits.py @@ -62,8 +62,8 @@ def login(self): return True login_params = { - 'username': self.username.encode('utf-8'), - 'password': self.password.encode('utf-8'), + 'username': self.username, + 'password': self.password, 'keeplogged': 1, 'langlang': '', 'login': 'Login', @@ -103,7 +103,7 @@ def process_column_header(td): result = td.img.get('title') if not result: result = td.get_text(strip=True) - return result.encode('utf-8') + return result for mode in search_strings: items = [] @@ -112,7 +112,7 @@ def process_column_header(td): for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['search'] = search_string diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py index 4fc3b1a085..396b87a77f 100644 --- a/sickbeard/providers/elitetorrent.py +++ b/sickbeard/providers/elitetorrent.py @@ -50,7 +50,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man results = [] lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang - ''' + """ Search query: http://www.elitetorrent.net/torrents.php?cat=4&modo=listado&orden=fecha&pag=1&buscar=fringe @@ -59,7 +59,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man orden = fecha => order buscar => Search show pag = 1 => page number - ''' + """ search_params = { 'cat': 4, @@ -80,7 +80,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_string = re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', search_string) diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index 1cb1f4a0ce..ef71dd763c 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -60,8 +60,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: 
disable=too-man logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), - logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) self.search_params.update({'type': ('search', 'rss')[mode == 'RSS'], 'search': search_string}) search_url = self.urls['rss'] if not self.custom_url else self.urls['rss'].replace(self.urls['index'], self.custom_url) diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 59b7c10ed1..3ccc1f490c 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -116,7 +116,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for search_string in search_params[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_url = self.urls['search'] % (freeleech, search_string) diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index 28e4943a16..b9458c8f2e 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -125,7 +125,7 @@ def process_column_header(td): for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['search'] = search_string diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index 9730291e00..1b21e229f8 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -104,7 +104,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_url = self.urls['search'] % '' if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) data = self.get_url(search_url, returns='text') diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index b966126e0c..1734361005 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -94,7 +94,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if mode != 'RSS': search_url = self.urls['search'] % (quote_plus(search_string), self.categories) - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) else: search_url = self.urls['rss'] % self.categories diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py index c2afe626ca..4f5eb9d704 100644 --- a/sickbeard/providers/hounddawgs.py +++ b/sickbeard/providers/hounddawgs.py @@ -105,7 +105,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) self.search_params['searchstr'] = search_string diff --git a/sickbeard/providers/ilovetorrents.py b/sickbeard/providers/ilovetorrents.py index 0e6a027b93..b49b9e88ef 100644 --- a/sickbeard/providers/ilovetorrents.py +++ b/sickbeard/providers/ilovetorrents.py @@ -18,6 +18,8 @@ # You should have received a copy of the GNU General Public License # along with 
SickRage. If not, see . +from __future__ import unicode_literals + import re import traceback from requests.compat import urljoin @@ -26,45 +28,43 @@ from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser -from sickrage.helper.common import convert_size, try_int +from sickrage.helper.common import convert_size from sickrage.providers.torrent.TorrentProvider import TorrentProvider class ILoveTorrentsProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes def __init__(self): - + # Provider Init - TorrentProvider.__init__(self, "ILoveTorrents") - + TorrentProvider.__init__(self, 'ILoveTorrents') + # URLs self.url = 'https://www.ilovetorrents.me/' self.urls = { - 'login': urljoin(self.url, "takelogin.php"), - 'detail': urljoin(self.url, "details.php?id=%s"), - 'search': urljoin(self.url, "browse.php"), - 'download': urljoin(self.url, "%s"), + 'login': urljoin(self.url, 'takelogin.php'), + 'detail': urljoin(self.url, 'details.php?id=%s'), + 'search': urljoin(self.url, 'browse.php'), + 'download': urljoin(self.url, '%s'), } - # Credentials self.username = None self.password = None - - # Torrent Stats + + # Torrent Stats self.minseed = None self.minleech = None - + # Proper Strings - self.proper_strings = ["PROPER", "REPACK", "REAL"] + self.proper_strings = ['PROPER', 'REPACK', 'REAL'] # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): if not self.username or not self.password: - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log(u'Invalid username or password. Check your settings', logger.WARNING) return True @@ -81,11 +81,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log(u'Unable to connect to provider', logger.WARNING) return False if re.search('Username or password incorrect', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log(u'Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -95,15 +95,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not self.login(): return results search_params = { - "cat": 0 + 'cat': 0 } for mode in search_strings: items = [] - logger.log(u"Search Mode: {0}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {0}".format - (search_string.decode("utf-8")), logger.DEBUG) + logger.log('Search string: {0}'.format + (search_string), logger.DEBUG) search_params['search'] = search_string @@ -112,13 +112,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man continue try: - with BS4Parser(data, "html.parser") as html: + with BS4Parser(data, 'html.parser') as html: torrent_table = html.find('table', class_='koptekst') torrent_rows = torrent_table('tr') if torrent_table else [] # Continue only if one Release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log(u'Data returned from provider does not contain any torrents', logger.DEBUG) continue for result in torrent_rows[1:]: @@ -145,18 +145,18 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format (title, seeders), logger.DEBUG) continue - #Use same failsafe as Bitsoup + # Use same failsafe as Bitsoup if seeders >= 32768 or leechers >= 32768: continue item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format(title, seeders, leechers), logger.DEBUG) + logger.log(u'Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) items.append(item) except Exception: - logger.log(u"Failed parsing provider. Traceback: {0}".format(traceback.format_exc()), logger.WARNING) + logger.log(u'Failed parsing provider. Traceback: {0}'.format(traceback.format_exc()), logger.WARNING) results += items diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index 7b3b178863..6adf97079b 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -18,14 +18,18 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . 
+from __future__ import unicode_literals + import re +import traceback + from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser -from sickrage.helper.exceptions import AuthException, ex -from sickrage.helper.common import convert_size, try_int +from sickrage.helper.exceptions import AuthException +from sickrage.helper.common import convert_size from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -33,7 +37,7 @@ class IPTorrentsProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): - TorrentProvider.__init__(self, "IPTorrents") + TorrentProvider.__init__(self, 'IPTorrents') self.username = None self.password = None @@ -54,7 +58,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") + raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) return True @@ -69,17 +73,17 @@ def login(self): self.get_url(self.urls['login'], returns='text') response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False # Invalid username and password combination if re.search('Invalid username and password combination', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return False # You tried too often, please try again after 2 hours! if re.search('You tried too often', response): - logger.log(u"You tried too often, please try again after 2 hours! Disable IPTorrents for at least 2 hours", logger.WARNING) + logger.log('You tried too often, please try again after 2 hours! 
Disable IPTorrents for at least 2 hours', logger.WARNING) return False return True @@ -93,11 +97,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile @@ -112,11 +116,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many data = re.sub(r'(?im)', '', data, 0) with BS4Parser(data, 'html5lib') as html: if not html: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if html.find(text='No Torrents Found!'): - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue torrent_table = html.find('table', attrs={'class': 'torrents'}) @@ -124,7 +128,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many # Continue only if one Release is found if len(torrents) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue for result in torrents[1:]: @@ -135,28 +139,31 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many leechers = int(result.find('td', attrs={'class': 'ac t_leechers'}).text) torrent_size = result('td')[5].text size = convert_size(torrent_size) or -1 - except (AttributeError, TypeError, KeyError): - continue - if not all([title, download_url]): - continue + if not all([title, download_url]): + continue - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + (title, seeders), logger.DEBUG) + continue - items.append(item) + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, + 'leechers': leechers, 'pubdate': None, 'hash': None} + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) - except Exception as e: - logger.log(u"Failed parsing provider. Error: %r" % ex(e), logger.ERROR) + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue + except Exception: + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) results += items return results diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index 7d93428388..10b6fc6cc3 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -18,6 +18,7 @@ from __future__ import unicode_literals +import traceback import validators from requests.compat import urljoin from sickbeard.bs4_parser import BS4Parser @@ -33,7 +34,7 @@ class KatProvider(TorrentProvider): # pylint: disable=too-many-instance-attribu def __init__(self): - TorrentProvider.__init__(self, "KickAssTorrents") + TorrentProvider.__init__(self, 'KickAssTorrents') self.public = True @@ -41,95 +42,98 @@ def __init__(self): self.minseed = None self.minleech = None - self.url = "https://kat.cr" - self.urls = {"search": urljoin(self.url, "%s/")} + self.url = 'https://kat.cr' + self.urls = {'search': urljoin(self.url, '%s/')} self.custom_url = None - self.cache = tvcache.TVCache(self, search_params={"RSS": ["tv", "anime"]}) + self.cache = tvcache.TVCache(self, search_params={'RSS': ['tv', 'anime']}) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements results = [] anime = (self.show and self.show.anime) or (ep_obj and ep_obj.show and ep_obj.show.anime) or False search_params = { - "q": "", - "field": "seeders", - "sorder": "desc", - "rss": 1, - "category": ("tv", "anime")[anime] + 'q': '', + 'field': 'seeders', + 'sorder': 'desc', + 'rss': 1, + 'category': ('tv', 'anime')[anime] } for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - search_params["q"] = search_string if mode != "RSS" else "" - search_params["field"] = "seeders" if mode != "RSS" else "time_add" + search_params['q'] = search_string if mode != 'RSS' else '' + search_params['field'] = 'seeders' if mode != 'RSS' else 'time_add' - if mode != "RSS": - logger.log("Search string: {}".format(search_string.decode("utf-8")), + if mode != 'RSS': + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - search_url = self.urls["search"] % ("usearch" if mode != "RSS" else search_string) + search_url = self.urls['search'] % ('usearch' if mode != 'RSS' else search_string) if self.custom_url: if not validators.url(self.custom_url): - logger.log("Invalid custom url: {}".format(self.custom_url), logger.WARNING) + logger.log('Invalid custom url: {0}'.format(self.custom_url), logger.WARNING) return results search_url = urljoin(self.custom_url, search_url.split(self.url)[1]) - data = self.get_url(search_url, params=search_params, returns="text") + data = self.get_url(search_url, params=search_params, returns='text') if not data: - logger.log("URL did not return data, maybe try a custom url, or a different one", logger.DEBUG) + logger.log('URL did not return data, maybe try a custom url, or a different one', logger.DEBUG) continue - if not data.startswith(". 
+from __future__ import unicode_literals + import re +import traceback from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -36,7 +39,7 @@ class MoreThanTVProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): # Provider Init - TorrentProvider.__init__(self, "MoreThanTV") + TorrentProvider.__init__(self, 'MoreThanTV') # Credentials self.username = None @@ -65,7 +68,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") + raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) return True @@ -82,11 +85,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Your username or password was incorrect.', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return False return True @@ -119,19 +122,19 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['searchstr'] = search_string data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -140,7 +143,7 @@ def process_column_header(td): # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue labels = [process_column_header(label) for label in torrent_rows[0]('td')] @@ -164,23 +167,25 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the" - u" minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + " minimum seeders: {0}. Seeders: {1})".format (title, seeders), logger.DEBUG) continue torrent_size = cells[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, + 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue - results += items return results diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py index b278a81b89..5a980739d4 100644 --- a/sickbeard/providers/newpct.py +++ b/sickbeard/providers/newpct.py @@ -19,8 +19,10 @@ # along with SickRage. If not, see . from __future__ import unicode_literals + from requests.compat import urljoin import re +import traceback from sickbeard import helpers from sickbeard import logger, tvcache @@ -69,7 +71,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) # Only search if user conditions are true if self.onlyspasearch and lang_info != 'es' and mode != 'RSS': @@ -80,7 +82,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['q'] = search_string @@ -119,10 +121,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man size = convert_size(torrent_size) or -1 item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log('Found result: {}'.format(title), logger.DEBUG) + logger.log('Found result: {0}'.format(title), logger.DEBUG) items.append(item) - except (AttributeError, TypeError): + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items @@ -163,14 +167,14 @@ def download_result(self, result): if url_torrent.startswith('http'): self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'}) - logger.log('Downloading a result from {}'.format(url)) + logger.log('Downloading a result from {0}'.format(url)) if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers): if self._verify_download(filename): - logger.log('Saved result to {}'.format(filename), logger.INFO) + logger.log('Saved result to {0}'.format(filename), logger.INFO) return True else: - logger.log('Could not download {}'.format(url), logger.WARNING) + logger.log('Could not download {0}'.format(url), logger.WARNING) helpers.remove_file_failed(filename) if urls: @@ -199,8 +203,8 @@ def _processTitle(title): # Language title = re.sub(r'\[Spanish[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) title = re.sub(r'\[Castellano[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) - title = re.sub(ur'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) - title = re.sub(ur'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) + title = re.sub(r'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) + title = re.sub(r'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) title += '-NEWPCT' diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index a2b9da4314..a13766e0ef 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -125,7 +125,6 @@ def get_providers_list(data): providers_dict[default.name].enable_daily = default.enable_daily providers_dict[default.name].enable_backlog = default.enable_backlog providers_dict[default.name].enable_manualsearch = default.enable_manualsearch - providers_dict[default.name].catIDs = default.catIDs return [provider for provider in providers_list if provider] @@ -174,13 +173,13 @@ def get_newznab_categories(self, just_caps=False): data = self.get_url(urljoin(self.url, 'api'), params=url_params, returns='text') if not data: - error_string = 'Error getting caps xml for [{}]'.format(self.name) + error_string = 'Error getting caps xml for [{0}]'.format(self.name) logger.log(error_string, logger.WARNING) return False, return_categories, error_string with BS4Parser(data, 'html5lib') as html: if not html.find('categories'): - error_string = 'Error parsing caps xml for [{}]'.format(self.name) + error_string = 'Error parsing caps xml for [{0}]'.format(self.name) logger.log(error_string, logger.DEBUG) return False, return_categories, error_string @@ -276,12 +275,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not self._check_auth(): return results - # gingadaddy has no caps. - if not self.caps and 'gingadaddy' not in self.url: + # For providers that have no caps, or for which the t=caps is not working.
+ if not self.caps and all(provider not in self.url for provider in ['gingadaddy', 'usenet-crawler']): self.get_newznab_categories(just_caps=True) - - if not self.caps and 'gingadaddy' not in self.url: - return results + if not self.caps: + return results for mode in search_strings: torznab = False @@ -312,10 +310,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params.pop('ep', '') items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {}'.format(search_string.decode('utf-8')), logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) if search_params['t'] != 'tvsearch': search_params['q'] = search_string diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py index 15983351d5..245909a8b9 100644 --- a/sickbeard/providers/norbits.py +++ b/sickbeard/providers/norbits.py @@ -20,6 +20,7 @@ from __future__ import unicode_literals +import traceback import json from requests.compat import urlencode @@ -74,12 +75,12 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode != 'RSS': - logger.log('Search string: {}'.format - (search_string.decode('utf-8')), logger.DEBUG) + logger.log('Search string: {0}'.format + (search_string), logger.DEBUG) post_data = { 'username': self.username, @@ -103,33 +104,38 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many 'not parsing it', logger.ERROR) for item in json_items.get('torrents', []): - title = item.pop('name', '') - download_url = '{}{}'.format( - self.urls['download'], - urlencode({'id': item.pop('id', ''), 'passkey': self.passkey})) - - if not all([title, download_url]): - continue - - seeders = try_int(item.pop('seeders', 0)) - leechers = try_int(item.pop('leechers', 0)) - - if seeders < min(self.minseed, 1): - logger.log('Discarding torrent because it does not meet ' - 'the minimum seeders: {0}. Seeders: {1})'.format - (title, seeders), logger.DEBUG) + try: + title = item.pop('name', '') + download_url = '{0}{1}'.format( + self.urls['download'], + urlencode({'id': item.pop('id', ''), 'passkey': self.passkey})) + + if not all([title, download_url]): + continue + + seeders = try_int(item.pop('seeders', 0)) + leechers = try_int(item.pop('leechers', 0)) + + if seeders < min(self.minseed, 1): + logger.log('Discarding torrent because it does not meet ' + 'the minimum seeders: {0}. Seeders: {1})'.format + (title, seeders), logger.DEBUG) + continue + + info_hash = item.pop('info_hash', '') + size = convert_size(item.pop('size', -1), -1) + + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': info_hash} + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format( + title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue - info_hash = item.pop('info_hash', '') - size = convert_size(item.pop('size', -1), -1) - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': info_hash} - if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format( - title, seeders, leechers), logger.DEBUG) - - items.append(item) - results += items return results diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index d461ec56ab..cd365c9c33 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -21,6 +21,7 @@ from __future__ import unicode_literals import re +import traceback from sickbeard import logger, tvcache @@ -55,11 +56,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u'Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u'Search string: {}'.format - (search_string.decode('utf-8')), logger.DEBUG) + logger.log('Search string: {0}'.format + (search_string), logger.DEBUG) search_params = { 'page': 'rss', @@ -84,7 +85,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man item_info = self.regex.search(curItem['summary']) if not item_info: - logger.log('There was a problem parsing an item summary, skipping: {}'.format + logger.log('There was a problem parsing an item summary, skipping: {0}'.format (title), logger.DEBUG) continue @@ -100,7 +101,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man continue if self.confirmed and not verified and mode != 'RSS': - logger.log('Found result {} but that doesn\'t seem like a verified result so I\'m ignoring it'.format + logger.log("Found result {0} but that doesn't seem like a verified result so I'm ignoring it".format (title), logger.DEBUG) continue @@ -111,8 +112,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders, leechers), logger.DEBUG) items.append(result) - except StandardError: - continue + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index 9f186bd60c..5778989066 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -18,7 +18,9 @@ # along with Medusa. If not, see . 
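Several of the hunks above replace the old bare except StandardError (which no longer exists on Python 3) with an explicit tuple of parsing exceptions plus a traceback log, so one malformed result no longer aborts the whole page of results. A minimal sketch of that loop shape, using the standard logging module in place of sickbeard's logger and a hypothetical parse_one callable:

import logging
import traceback

logging.basicConfig(level=logging.DEBUG)


def parse_items(raw_items, parse_one):
    """Parse every raw item; log and skip the ones that cannot be parsed."""
    results = []
    for raw in raw_items:
        try:
            results.append(parse_one(raw))
        except (AttributeError, TypeError, KeyError, ValueError, IndexError):
            # Same exception tuple the hunks use instead of the old StandardError catch.
            logging.error('Failed parsing provider. Traceback: %r', traceback.format_exc())
            continue
    return results


items = parse_items([{'title': 'Show.S01E01'}, {}], lambda raw: raw['title'].lower())
assert items == ['show.s01e01']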
from __future__ import unicode_literals + import re +import traceback import sickbeard from sickbeard import logger, tvcache @@ -83,7 +85,7 @@ def _get_size(self, item): units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] summary = item.get('summary') if summary: - size_match = re.search(ur'Size[^\d]*([0-9.]*.[A-Z]*)', summary) + size_match = re.search(r'Size[^\d]*([0-9.]*.[A-Z]*)', summary) size = convert_size(size_match.group(1), units=units) or -1 if size_match else -1 return try_int(size) @@ -103,11 +105,11 @@ def search(self, search_strings, age=0, ep_obj=None): for mode in search_strings: items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: search_params['search'] = search_string if mode != 'RSS': - logger.log('Search string: {}'.format(search_string.decode('utf-8')), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) data = self.get_url(self.urls['api'], params=search_params, returns='json') @@ -119,11 +121,16 @@ def search(self, search_strings, age=0, ep_obj=None): continue for item in data: - if not self._get_title_and_url(item): - continue - - logger.log('Found result: {}'.format(item.get('title')), logger.DEBUG) - items.append(item) + try: + if not self._get_title_and_url(item): + continue + + logger.log('Found result: {0}'.format(item.get('title')), logger.DEBUG) + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index 0f2c9cb82e..591d385cb2 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -18,6 +18,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals + import re import traceback from requests.compat import quote @@ -26,7 +28,7 @@ from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser -from sickrage.helper.common import convert_size, try_int +from sickrage.helper.common import convert_size from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -34,7 +36,7 @@ class PretomeProvider(TorrentProvider): # pylint: disable=too-many-instance-att def __init__(self): - TorrentProvider.__init__(self, "Pretome") + TorrentProvider.__init__(self, 'Pretome') self.username = None self.password = None @@ -50,7 +52,7 @@ def __init__(self): self.url = self.urls['base_url'] - self.categories = "&st=1&cat%5B%5D=7" + self.categories = '&st=1&cat%5B%5D=7' self.proper_strings = ['PROPER', 'REPACK'] @@ -59,7 +61,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password or not self.pin: - logger.log(u"Invalid username or password or pin. Check your settings", logger.WARNING) + logger.log('Invalid username or password or pin. Check your settings', logger.WARNING) return True @@ -73,11 +75,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Username or password incorrect', response): - logger.log(u"Invalid username or password. 
Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return False return True @@ -89,11 +91,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_url = self.urls['search'] % (quote(search_string), self.categories) @@ -105,26 +107,26 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many try: with BS4Parser(data, 'html5lib') as html: # Continue only if one Release is found - empty = html.find('h2', text="No .torrents fit this filter criteria") + empty = html.find('h2', text='No .torrents fit this filter criteria') if empty: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue torrent_table = html.find('table', attrs={'style': 'border: none; width: 100%;'}) if not torrent_table: - logger.log(u"Could not find table of torrents", logger.ERROR) + logger.log('Could not find table of torrents', logger.ERROR) continue torrent_rows = torrent_table('tr', attrs={'class': 'browse'}) for result in torrent_rows: - cells = result('td') - size = None - link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'}) + try: + cells = result('td') + size = None + link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'}) - torrent_id = link['href'].replace('details.php?id=', '') + torrent_id = link['href'].replace('details.php?id=', '') - try: if link.get('title', ''): title = link['title'] else: @@ -139,27 +141,31 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many torrent_size = cells[7].text size = convert_size(torrent_size) or -1 - except (AttributeError, TypeError): - continue + if not all([title, download_url]): + continue - if not all([title, download_url]): - continue + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + (title, seeders), logger.DEBUG) + continue - # Filter unseeded torrent - if seeders < min(self.minseed, 1): + item = {'title': title, 'link': download_url, 'size': size, 'seeders': + seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue + logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + items.append(item) - items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) results += items diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index 67fbbddc7d..e2915554d1 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -18,10 +18,10 @@ from __future__ import unicode_literals +import traceback import datetime import time -import sickbeard from sickbeard import logger, tvcache from sickbeard.indexers.indexer_config import INDEXER_TVDB @@ -33,7 +33,7 @@ class RarbgProvider(TorrentProvider): # pylint: disable=too-many-instance-attri def __init__(self): - TorrentProvider.__init__(self, "Rarbg") + TorrentProvider.__init__(self, 'Rarbg') self.public = True self.minseed = None @@ -44,10 +44,10 @@ def __init__(self): self.token_expires = None # Spec: https://torrentapi.org/apidocs_v2.txt - self.url = "https://rarbg.com" - self.urls = {"api": "http://torrentapi.org/pubapi_v2.php"} + self.url = 'https://rarbg.com' + self.urls = {'api': 'http://torrentapi.org/pubapi_v2.php'} - self.proper_strings = ["{{PROPER|REPACK}}"] + self.proper_strings = ['{{PROPER|REPACK}}'] self.cache = tvcache.TVCache(self, min_time=10) # only poll RARBG every 10 minutes max @@ -56,17 +56,17 @@ def login(self): return True login_params = { - "get_token": "get_token", - "format": "json", - "app_id": "sickrage2" + 'get_token': 'get_token', + 'format': 'json', + 'app_id': 'sickrage2' } - response = self.get_url(self.urls["api"], params=login_params, returns="json") + response = self.get_url(self.urls['api'], params=login_params, returns='json') if not response: - logger.log("Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False - self.token = response.get("token") + self.token = response.get('token') self.token_expires = datetime.datetime.now() + datetime.timedelta(minutes=14) if self.token else None return self.token is not None @@ -76,14 +76,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results search_params = { - "app_id": "sickrage2", - "category": "tv", - "min_seeders": try_int(self.minseed), - "min_leechers": try_int(self.minleech), - "limit": 100, - "format": "json_extended", - "ranked": try_int(self.ranked), - "token": self.token, + 'app_id': 'sickrage2', + 'category': 'tv', + 'min_seeders': try_int(self.minseed), + 'min_leechers': try_int(self.minleech), + 'limit': 100, + 'format': 'json_extended', + 'ranked': try_int(self.ranked), + 'token': self.token, } if ep_obj is not None: @@ -95,25 +95,25 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) - if mode == "RSS": - search_params["sort"] = "last" - search_params["mode"] = "list" - search_params.pop("search_string", None) - search_params.pop("search_tvdb", None) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + if mode == 'RSS': + search_params['sort'] = 'last' + search_params['mode'] = 'list' + search_params.pop('search_string', None) + search_params.pop('search_tvdb', None) else: - search_params["sort"] = self.sorting if self.sorting else "seeders" - search_params["mode"] = "search" + search_params['sort'] = self.sorting if self.sorting else 
'seeders' + search_params['mode'] = 'search' if ep_indexer == INDEXER_TVDB and ep_indexerid: - search_params["search_tvdb"] = ep_indexerid + search_params['search_tvdb'] = ep_indexerid else: - search_params.pop("search_tvdb", None) + search_params.pop('search_tvdb', None) for search_string in search_strings[mode]: - if mode != "RSS": - search_params["search_string"] = search_string - logger.log("Search string: {}".format(search_string.decode("utf-8")), + if mode != 'RSS': + search_params['search_string'] = search_string + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) # Check if token is still valid before search @@ -124,57 +124,59 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Changing to 5 because of server clock desync time.sleep(5) - data = self.get_url(self.urls["api"], params=search_params, returns="json") + data = self.get_url(self.urls['api'], params=search_params, returns='json') if not isinstance(data, dict): - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue - error = data.get("error") - error_code = data.get("error_code") - # Don't log when {"error":"No results found","error_code":20} + error = data.get('error') + error_code = data.get('error_code') + # Don't log when {'error':'No results found','error_code':20} # List of errors: https://github.com/rarbg/torrentapi/issues/1#issuecomment-114763312 if error: if error_code == 5: # 5 = Too many requests per second - logger.log("{0}. Error code: {1}".format(error, error_code), logger.INFO) + logger.log('{0}. Error code: {1}'.format(error, error_code), logger.INFO) elif error_code not in (14, 20): # 14 = Cant find thetvdb in database. Are you sure this thetvdb exists? # 20 = No results found - logger.log("{0}. Error code: {1}".format(error, error_code), logger.WARNING) + logger.log('{0}. Error code: {1}'.format(error, error_code), logger.WARNING) continue - torrent_results = data.get("torrent_results") + torrent_results = data.get('torrent_results') if not torrent_results: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue for item in torrent_results: try: - title = item.pop("title") - download_url = item.pop("download") + title = item.pop('title') + download_url = item.pop('download') if not all([title, download_url]): continue - seeders = item.pop("seeders") - leechers = item.pop("leechers") + seeders = item.pop('seeders') + leechers = item.pop('leechers') if seeders < min(self.minseed, 1): - if mode != "RSS": + if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" " minimum seeders: {0}. Seeders: {1})".format (title, seeders), logger.DEBUG) continue - torrent_size = item.pop("size", -1) + torrent_size = item.pop('size', -1) size = convert_size(torrent_size) or -1 - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} items.append(result) - except StandardError: - continue + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index 4a8f3ad8d9..bfc43c1f48 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -155,14 +155,14 @@ def validateRSS(self): # pylint: disable=too-many-return-statements if self.cookies: cookie_validator = re.compile(r'^(\w+=\w+)(;\w+=\w+)*$') if not cookie_validator.match(self.cookies): - return False, 'Cookie is not correctly formatted: {}'.format(self.cookies) + return False, 'Cookie is not correctly formatted: {0}'.format(self.cookies) add_dict_to_cookiejar(self.session.cookies, dict(x.rsplit('=', 1) for x in self.cookies.split(';'))) # pylint: disable=protected-access # Access to a protected member of a client class data = self.cache._getRSSData()['entries'] if not data: - return False, 'No items found in the RSS feed {}'.format(self.url) + return False, 'No items found in the RSS feed {0}'.format(self.url) title, url = self._get_title_and_url(data[0]) @@ -180,12 +180,12 @@ def validateRSS(self): # pylint: disable=too-many-return-statements bdecode(torrent_file) except Exception as error: self.dumpHTML(torrent_file) - return False, 'Torrent link is not a valid torrent file: {}'.format(ex(error)) + return False, 'Torrent link is not a valid torrent file: {0}'.format(ex(error)) return True, 'RSS feed Parsed correctly' except Exception as error: - return False, 'Error when trying to load RSS: {}'.format(ex(error)) + return False, 'Error when trying to load RSS: {0}'.format(ex(error)) @staticmethod def dumpHTML(data): @@ -197,10 +197,10 @@ def dumpHTML(data): fileOut.close() helpers.chmodAsParent(dumpName) except IOError as error: - logger.log('Unable to save the file: {}'.format(ex(error)), logger.ERROR) + logger.log('Unable to save the file: {0}'.format(ex(error)), logger.ERROR) return False - logger.log('Saved custom_torrent html dump {} '.format(dumpName), logger.INFO) + logger.log('Saved custom_torrent html dump {0} '.format(dumpName), logger.INFO) return True diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 652dd77de2..c58df97af1 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -18,7 +18,10 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals + import re +import traceback import time from requests.compat import urljoin, quote @@ -37,7 +40,7 @@ class SCCProvider(TorrentProvider): # pylint: disable=too-many-instance-attribu def __init__(self): - TorrentProvider.__init__(self, "SceneAccess") + TorrentProvider.__init__(self, 'SceneAccess') self.username = None self.password = None @@ -74,12 +77,12 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search(r'Username or password incorrect', response) \ or re.search(r'SceneAccess \| Login', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -97,10 +100,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] if mode != 'RSS': - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_url = self.urls['search'] % (quote(search_string), self.categories[mode]) @@ -109,7 +112,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, returns='text') time.sleep(cpu_presets[sickbeard.CPU_PRESET]) except Exception as e: - logger.log(u"Unable to fetch data. Error: %s" % repr(e), logger.WARNING) + logger.log('Unable to fetch data. Error: %s' % repr(e), logger.WARNING) if not data: continue @@ -120,7 +123,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue for result in torrent_table('tr')[1:]: @@ -134,14 +137,16 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(urljoin(self.url, link['href']), returns='text') if data: with BS4Parser(data) as details_html: - title = re.search('(?<=").+(?. from __future__ import unicode_literals -import re +import traceback from requests.compat import urljoin from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache -from sickbeard.bs4_parser import BS4Parser -from sickrage.helper.common import convert_size, try_int +from sickrage.helper.common import try_int from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -36,7 +35,7 @@ class SceneEliteProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): # Provider Init - TorrentProvider.__init__(self, "SceneElite") + TorrentProvider.__init__(self, 'SceneElite') # Credentials self.username = None @@ -48,16 +47,16 @@ def __init__(self): self.freeleech = None # URLs - self.url = "https://sceneelite.org/" + self.url = 'https://sceneelite.org/' self.urls = { - "login": urljoin(self.url, "/api/v1/auth"), - "search": urljoin(self.url, "/api/v1/torrents"), - "download": urljoin(self.url, "/api/v1/torrents/download/"), + 'login': urljoin(self.url, '/api/v1/auth'), + 'search': urljoin(self.url, '/api/v1/torrents'), + 'download': urljoin(self.url, '/api/v1/torrents/download/'), } # Proper Strings - self.proper_strings = ["PROPER", "REPACK", "REAL"] - cache_params = {"RSS": [""]} + self.proper_strings = ['PROPER', 'REPACK', 'REAL'] + cache_params = {'RSS': ['']} # Cache self.cache = tvcache.TVCache(self, min_time=0.1, search_params=cache_params) @@ -66,13 +65,13 @@ def login(self): return True login_params = { - "username": self.username, - "password": self.password + 'username': self.username, + 'password': self.password } - response = self.get_url(self.urls["login"], params=login_params, returns="json") + response = self.get_url(self.urls['login'], params=login_params, returns='json') if not response: - logger.log("Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to 
provider', logger.WARNING) return False return True @@ -83,66 +82,68 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Search Params search_params = { - "extendedSearch": 'false', - "hideOld": 'false', - "index": '0', - "limit": '100', - "order": 'asc', - "page": 'search', - "sort": 'n', - "categories[0]": 3, - "categories[1]": 6, - "categories[2]": 7 + 'extendedSearch': 'false', + 'hideOld': 'false', + 'index': '0', + 'limit': '100', + 'order': 'asc', + 'page': 'search', + 'sort': 'n', + 'categories[0]': 3, + 'categories[1]': 6, + 'categories[2]': 7 } for mode in search_strings: items = [] - logger.log("Search Mode: {0}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if mode != "RSS": - logger.log("Search string: {0}".format - (search_string.decode("utf-8")), logger.DEBUG) - search_params["searchText"] = search_string - else: - search_params["page"] = 'last_seriebrowse' + if mode != 'RSS': + logger.log('Search string: {0}'.format + (search_string), logger.DEBUG) + search_params['searchText'] = search_string + else: + search_params['page'] = 'last_seriebrowse' results = [] - search_url = self.urls["search"] + search_url = self.urls['search'] try: - jdata = self.get_url(search_url, params=search_params, returns="json") + jdata = self.get_url(search_url, params=search_params, returns='json') except ValueError: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue for torrent in jdata: try: - title = torrent.pop("name", "") - id = str(torrent.pop("id", "")) + title = torrent.pop('name', '') + id = str(torrent.pop('id', '')) if not id: continue - seeders = try_int(torrent.pop("seeders", ""), 1) - leechers = try_int(torrent.pop("leechers", ""), 0) - freeleech = torrent.pop("frileech") + seeders = try_int(torrent.pop('seeders', ''), 1) + leechers = try_int(torrent.pop('leechers', ''), 0) + freeleech = torrent.pop('frileech') if self.freeleech and freeleech != 1: continue - size = try_int(torrent.pop("size", ""), 0) - download_url = self.urls["download"] + id + size = try_int(torrent.pop('size', ''), 0) + download_url = self.urls['download'] + id # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) continue item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: - continue + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 496c4f500b..d3d95ec870 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -20,6 +20,8 @@ import re +import traceback + from requests.compat import quote from requests.utils import dict_from_cookiejar @@ -34,7 +36,7 @@ class SceneTimeProvider(TorrentProvider): # pylint: disable=too-many-instance-a def __init__(self): - TorrentProvider.__init__(self, "SceneTime") + TorrentProvider.__init__(self, 'SceneTime') self.username = None self.password = None @@ -51,7 +53,7 @@ def __init__(self): self.url = self.urls['base_url'] - self.categories = "&c2=1&c43=13&c9=1&c63=1&c77=1&c79=1&c100=1&c101=1" + self.categories = '&c2=1&c43=13&c9=1&c63=1&c77=1&c79=1&c100=1&c101=1' def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): @@ -62,11 +64,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Username or password incorrect', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return False return True @@ -78,11 +80,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_url = self.urls['search'] % (quote(search_string), self.categories) @@ -92,14 +94,14 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many continue with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find('div', id="torrenttable") + torrent_table = html.find('div', id='torrenttable') torrent_rows = [] if torrent_table: - torrent_rows = torrent_table.select("tr") + torrent_rows = torrent_table.select('tr') # Continue only if one Release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # Scenetime apparently uses different number of cells in #torrenttable based @@ -112,10 +114,10 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many cells = result('td') link = cells[labels.index('Name')].find('a') - torrent_id = link['href'].replace('details.php?id=', '').split("&")[0] + torrent_id = link['href'].replace('details.php?id=', '').split('&')[0] title = link.get_text(strip=True) - download_url = self.urls['download'] % (torrent_id, "%s.torrent" % title.replace(" ", ".")) + download_url = self.urls['download'] % (torrent_id, '%s.torrent' % title.replace(' ', '.')) seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True)) leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True)) @@ -123,24 +125,25 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many size = convert_size(torrent_size) or 
-1 - except (AttributeError, TypeError, KeyError, ValueError): - continue + if not all([title, download_url]): + continue - if not all([title, download_url]): - continue + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + (title, seeders), logger.DEBUG) + continue - # Filter unseeded torrent - if seeders < min(self.minseed, 1): + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) - items.append(item) + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 490f520759..2e78ad7755 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -18,7 +18,10 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals + import re +import traceback from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -35,7 +38,7 @@ class SpeedCDProvider(TorrentProvider): # pylint: disable=too-many-instance-att def __init__(self): # Provider Init - TorrentProvider.__init__(self, "Speedcd") + TorrentProvider.__init__(self, 'Speedcd') # Credentials self.username = None @@ -70,11 +73,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Incorrect username or Password. Please try again.', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -115,12 +118,12 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['search'] = search_string @@ -136,7 +139,7 @@ def process_column_header(td): # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue labels = [process_column_header(label) for label in torrent_rows[0]('th')] @@ -157,19 +160,22 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) continue torrent_size = cells[labels.index('Size')].get_text() torrent_size = torrent_size[:-2] + ' ' + torrent_size[-2:] size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, + 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index fe5f9ddb1e..65b2224797 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -18,6 +18,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . 
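The seeder filter that recurs in these providers drops a result when its seeder count falls below min(minseed, 1), which in practice only ever discards completely unseeded torrents. A tiny standalone restatement of that guard; the function name and values are illustrative only:

def keep_result(seeders, minseed):
    """Mirror of the recurring guard: drop results with fewer than min(minseed, 1) seeders."""
    return seeders >= min(minseed, 1)


assert not keep_result(seeders=0, minseed=1)   # unseeded torrents are discarded
assert keep_result(seeders=0, minseed=0)       # with minseed=0 nothing is filtered
assert keep_result(seeders=2, minseed=5)       # the effective threshold never exceeds one seeder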
+from __future__ import unicode_literals + from requests.auth import AuthBase import time import traceback @@ -69,7 +71,7 @@ def login(self): response = self.get_url(self.urls['login_page'], post_data=login_params, returns='json') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if response and 'token' in response: @@ -79,7 +81,7 @@ def login(self): self.session.auth = T411Auth(self.token) return True else: - logger.log(u"Token not found in authentication response", logger.WARNING) + logger.log('Token not found in authentication response', logger.WARNING) return False def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements @@ -89,11 +91,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_urlS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS'] @@ -104,13 +106,13 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many try: if 'torrents' not in data and mode != 'RSS': - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue torrents = data['torrents'] if mode != 'RSS' else data if not torrents: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue for torrent in torrents: @@ -120,7 +122,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many try: title = torrent['name'] torrent_id = torrent['id'] - download_url = (self.urls['download'] % torrent_id).encode('utf8') + download_url = (self.urls['download'] % torrent_id) if not all([title, download_url]): continue @@ -132,27 +134,28 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. 
Seeders: {1})".format(title, seeders), logger.DEBUG) continue if self.confirmed and not verified and mode != 'RSS': - logger.log(u"Found result " + title + " but that doesn't seem like a verified result so I'm ignoring it", logger.DEBUG) + logger.log("Found result {0} but that doesn't seem like a verified result so I'm ignoring it".format(title), logger.DEBUG) continue size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, + 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) items.append(item) - except Exception: - logger.log(u"Invalid torrent data, skipping result: %s" % torrent, logger.DEBUG) - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.DEBUG) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + logger.log('Failed parsing provider. Traceback: %s' % traceback.format_exc(), logger.ERROR) results += items diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index c7a89111a6..0799d0ee01 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -21,6 +21,7 @@ from __future__ import unicode_literals import re +import traceback import validators from requests.compat import urljoin @@ -36,7 +37,7 @@ class ThePirateBayProvider(TorrentProvider): # pylint: disable=too-many-instanc def __init__(self): # Provider Init - TorrentProvider.__init__(self, "ThePirateBay") + TorrentProvider.__init__(self, 'ThePirateBay') # Credentials self.public = True @@ -47,10 +48,10 @@ def __init__(self): self.confirmed = True # URLs - self.url = "https://thepiratebay.se" + self.url = 'https://thepiratebay.se' self.urls = { - "rss": urljoin(self.url, "browse/200"), - "search": urljoin(self.url, "s/"), # Needs trailing / + 'rss': urljoin(self.url, 'browse/200'), + 'search': urljoin(self.url, 's/'), # Needs trailing / } self.custom_url = None @@ -66,18 +67,18 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man https://pirateproxy.pl/s/?q=Game of Thrones&type=search&orderby=7&page=0&category=200 """ search_params = { - "q": "", - "type": "search", - "orderby": 7, - "page": 0, - "category": 200 + 'q': '', + 'type': 'search', + 'orderby': 7, + 'page': 0, + 'category': 200 } # Units - units = ["B", "KIB", "MIB", "GIB", "TIB", "PIB"] + units = ['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB'] def process_column_header(th): - result = "" + result = '' if th.a: result = th.a.get_text(strip=True) if not result: @@ -86,81 +87,83 @@ def process_column_header(th): for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - search_url = self.urls["search"] if mode != "RSS" else self.urls["rss"] + search_url = self.urls['search'] if mode != 'RSS' else self.urls['rss'] if 
self.custom_url: if not validators.url(self.custom_url): - logger.log("Invalid custom url: {}".format(self.custom_url), logger.WARNING) + logger.log('Invalid custom url: {0}'.format(self.custom_url), logger.WARNING) return results search_url = urljoin(self.custom_url, search_url.split(self.url)[1]) - if mode != "RSS": - search_params["q"] = search_string - logger.log("Search string: {search}".format - (search=search_string.decode("utf-8")), logger.DEBUG) + if mode != 'RSS': + search_params['q'] = search_string + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) - data = self.get_url(search_url, params=search_params, returns="text") + data = self.get_url(search_url, params=search_params, returns='text') else: - data = self.get_url(search_url, returns="text") + data = self.get_url(search_url, returns='text') if not data: - logger.log("URL did not return data, maybe try a custom url, or a different one", logger.DEBUG) + logger.log('URL did not return data, maybe try a custom url, or a different one', logger.DEBUG) continue - with BS4Parser(data, "html5lib") as html: - torrent_table = html.find("table", id="searchResult") - torrent_rows = torrent_table("tr") if torrent_table else [] + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', id='searchResult') + torrent_rows = torrent_table('tr') if torrent_table else [] # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue - labels = [process_column_header(label) for label in torrent_rows[0]("th")] + labels = [process_column_header(label) for label in torrent_rows[0]('th')] # Skip column headers for result in torrent_rows[1:]: try: - cells = result("td") + cells = result('td') - title = result.find(class_="detName").get_text(strip=True) - download_url = result.find(title="Download this torrent using magnet")["href"] + self._custom_trackers - if "magnet:?" not in download_url: - logger.log("Invalid ThePirateBay proxy please try another one", logger.DEBUG) + title = result.find(class_='detName').get_text(strip=True) + download_url = result.find(title='Download this torrent using magnet')['href'] + self._custom_trackers + if 'magnet:?' not in download_url: + logger.log('Invalid ThePirateBay proxy please try another one', logger.DEBUG) continue if not all([title, download_url]): continue - seeders = try_int(cells[labels.index("SE")].get_text(strip=True)) - leechers = try_int(cells[labels.index("LE")].get_text(strip=True)) + seeders = try_int(cells[labels.index('SE')].get_text(strip=True)) + leechers = try_int(cells[labels.index('LE')].get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): - if mode != "RSS": - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue # Accept Torrent only from Good People for every Episode Search - if self.confirmed and not result.find(alt=re.compile(r"VIP|Trusted")): - if mode != "RSS": - logger.log("Found result {} but that doesn't seem like a trusted result so I'm ignoring it".format(title), logger.DEBUG) + if self.confirmed and not result.find(alt=re.compile(r'VIP|Trusted')): + if mode != 'RSS': + logger.log("Found result {0} but that doesn't seem like a trusted result so I'm ignoring it".format(title), logger.DEBUG) continue # Convert size after all possible skip scenarios - torrent_size = cells[labels.index("Name")].find(class_="detDesc").get_text(strip=True).split(", ")[1] - torrent_size = re.sub(r"Size ([\d.]+).+([KMGT]iB)", r"\1 \2", torrent_size) + torrent_size = cells[labels.index('Name')].find(class_='detDesc').get_text(strip=True).split(', ')[1] + torrent_size = re.sub(r'Size ([\d.]+).+([KMGT]iB)', r'\1 \2', torrent_size) size = convert_size(torrent_size, units=units) or -1 item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 8ccb4de307..776277f8ad 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -18,6 +18,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . 
+from __future__ import unicode_literals + import re import traceback @@ -28,7 +30,7 @@ from sickbeard.common import Quality from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException -from sickrage.helper.common import convert_size, try_int +from sickrage.helper.common import convert_size from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -63,7 +65,7 @@ class TNTVillageProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): - TorrentProvider.__init__(self, "TNTVillage") + TorrentProvider.__init__(self, 'TNTVillage') self._uid = None self._hash = None @@ -109,14 +111,14 @@ def __init__(self): self.proper_strings = ['PROPER', 'REPACK'] - self.categories = "cat=29" + self.categories = 'cat=29' self.cache = tvcache.TVCache(self, min_time=30) # only poll TNTVillage every 30 minutes max def _check_auth(self): if not self.username or not self.password: - raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") + raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') return True @@ -133,11 +135,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Sono stati riscontrati i seguenti errori', response) or re.search('Connettiti', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return False return True @@ -180,27 +182,27 @@ def _episodeQuality(torrent_rows): # pylint: disable=too-many-return-statements if img_all: for img_type in img_all: try: - file_quality = file_quality + " " + img_type['src'].replace("style_images/mkportal-636/", "").replace(".gif", "").replace(".png", "") + file_quality = file_quality + ' ' + img_type['src'].replace('style_images/mkportal-636/', '').replace('.gif', '').replace('.png', '') except Exception: - logger.log(u"Failed parsing quality. Traceback: %s" % traceback.format_exc(), logger.ERROR) + logger.log('Failed parsing quality. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) else: file_quality = (torrent_rows('td'))[1].get_text() - logger.log(u"Episode quality: %s" % file_quality, logger.DEBUG) + logger.log('Episode quality: %s' % file_quality, logger.DEBUG) def checkName(options, func): return func([re.search(option, file_quality, re.I) for option in options]) - dvdOptions = checkName(["dvd", "dvdrip", "dvdmux", "DVD9", "DVD5"], any) - bluRayOptions = checkName(["BD", "BDmux", "BDrip", "BRrip", "Bluray"], any) - sdOptions = checkName(["h264", "divx", "XviD", "tv", "TVrip", "SATRip", "DTTrip", "Mpeg2"], any) - hdOptions = checkName(["720p"], any) - fullHD = checkName(["1080p", "fullHD"], any) + dvdOptions = checkName(['dvd', 'dvdrip', 'dvdmux', 'DVD9', 'DVD5'], any) + bluRayOptions = checkName(['BD', 'BDmux', 'BDrip', 'BRrip', 'Bluray'], any) + sdOptions = checkName(['h264', 'divx', 'XviD', 'tv', 'TVrip', 'SATRip', 'DTTrip', 'Mpeg2'], any) + hdOptions = checkName(['720p'], any) + fullHD = checkName(['1080p', 'fullHD'], any) if img_all: file_quality = (torrent_rows('td'))[1].get_text() - webdl = checkName(["webdl", "webmux", "webrip", "dl-webmux", "web-dlmux", "webdl-mux", "web-dl", "webdlmux", "dlmux"], any) + webdl = checkName(['webdl', 'webmux', 'webrip', 'dl-webmux', 'web-dlmux', 'webdl-mux', 'web-dl', 'webdlmux', 'dlmux'], any) if sdOptions and not dvdOptions and not fullHD and not hdOptions: return Quality.SDTV @@ -234,13 +236,13 @@ def _is_italian(self, torrent_rows): else: continue - if re.search("ita", name.split(sub)[0], re.I): - logger.log(u"Found Italian release: " + name, logger.DEBUG) + if re.search('ita', name.split(sub)[0], re.I): + logger.log('Found Italian release: ' + name, logger.DEBUG) italian = True break - if not subFound and re.search("ita", name, re.I): - logger.log(u"Found Italian release: " + name, logger.DEBUG) + if not subFound and re.search('ita', name, re.I): + logger.log('Found Italian release: ' + name, logger.DEBUG) italian = True return italian @@ -253,8 +255,8 @@ def _is_english(torrent_rows): return False english = False - if re.search("eng", name, re.I): - logger.log(u"Found English release: " + name, logger.DEBUG) + if re.search('eng', name, re.I): + logger.log('Found English release: ' + name, logger.DEBUG) english = True return english @@ -265,11 +267,11 @@ def _is_season_pack(name): try: parse_result = NameParser(tryIndexers=True).parse(name) except (InvalidNameException, InvalidShowException) as error: - logger.log(u"{}".format(error), logger.DEBUG) + logger.log('{0}'.format(error), logger.DEBUG) return False main_db_con = db.DBConnection() - sql_selection = "select count(*) as count from tv_episodes where showid = ? and season = ?" + sql_selection = 'select count(*) as count from tv_episodes where showid = ? and season = ?' 
episodes = main_db_con.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number]) if int(episodes[0]['count']) == len(parse_result.episode_numbers): return True @@ -279,11 +281,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if not self.login(): return results - self.categories = "cat=" + str(self.cat) + self.categories = 'cat=' + str(self.cat) for mode in search_params: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode == 'RSS': @@ -308,12 +310,12 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many search_url = self.urls['search_page'].format(z, self.categories) if mode != 'RSS': - logger.log(u"Search string: {}".format - (search_string.decode("utf-8")), logger.DEBUG) + logger.log('Search string: {0}'.format + (search_string), logger.DEBUG) data = self.get_url(search_url, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue try: @@ -323,7 +325,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many # Continue only if one Release is found if len(torrent_rows) < 3: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) last_page = 1 continue @@ -340,60 +342,63 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many leechers = int(leechers.strip('[]')) seeders = result('td')[3]('td')[2].text seeders = int(seeders.strip('[]')) - torrent_size = result('td')[3]('td')[3].text.strip('[]') + " GB" + torrent_size = result('td')[3]('td')[3].text.strip('[]') + ' GB' size = convert_size(torrent_size) or -1 - except (AttributeError, TypeError): - continue - - filename_qt = self._reverseQuality(self._episodeQuality(result)) - for text in self.hdtext: - title1 = title - title = title.replace(text, filename_qt) - if title != title1: - break - - if Quality.nameQuality(title) == Quality.UNKNOWN: - title += filename_qt - - if not self._is_italian(result) and not self.subtitle: - logger.log(u"Torrent is subtitled, skipping: %s " % title, logger.DEBUG) - continue - - if self.engrelease and not self._is_english(result): - logger.log(u"Torrent isnt english audio/subtitled , skipping: %s " % title, logger.DEBUG) - continue - - search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] - show_title = search_show - rindex = re.search(r'([Ss][\d{1,2}]+)', title) - if rindex: - show_title = title[:rindex.start()] - ep_params = title[rindex.start():] - if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower(): - new_title = search_show + ep_params - title = new_title - - if not all([title, download_url]): - continue - - if self._is_season_pack(title): - title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title) - # Filter unseeded torrent - if seeders < min(self.minseed, 1): + filename_qt = self._reverseQuality(self._episodeQuality(result)) + for text in self.hdtext: + title1 = title + title = title.replace(text, filename_qt) + if title != title1: + break + + if Quality.nameQuality(title) == Quality.UNKNOWN: + title += filename_qt + + if not self._is_italian(result) and not self.subtitle: + logger.log('Torrent is subtitled, skipping: %s ' % title, logger.DEBUG) + continue + + if 
self.engrelease and not self._is_english(result): + logger.log('Torrent isnt english audio/subtitled , skipping: %s ' % title, logger.DEBUG) + continue + + search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] + show_title = search_show + rindex = re.search(r'([Ss][\d{1,2}]+)', title) + if rindex: + show_title = title[:rindex.start()] + ep_params = title[rindex.start():] + if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower(): + new_title = search_show + ep_params + title = new_title + + if not all([title, download_url]): + continue + + if self._is_season_pack(title): + title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title) + + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + (title, seeders), logger.DEBUG) + continue + + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) - items.append(item) + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) results += items diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index a935ddd9a7..9fff0bf289 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -18,7 +18,10 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . 
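The tntvillage _is_season_pack logic shown above treats a release as a full season pack when it names exactly as many episodes as the tv_episodes table knows for that show and season. A minimal sqlite3 sketch of that count comparison, with an in-memory table reduced to the columns the query touches and made-up ids (NameParser and the real schema are assumed, not shown):

import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE tv_episodes (showid INTEGER, season INTEGER, episode INTEGER)')
con.executemany('INSERT INTO tv_episodes VALUES (?, ?, ?)',
                [(1001, 1, ep) for ep in range(1, 11)])   # show 1001, season 1, 10 episodes


def is_season_pack(showid, season, episode_numbers):
    """A release is a season pack when it carries every episode the database knows of."""
    row = con.execute('SELECT COUNT(*) AS count FROM tv_episodes '
                      'WHERE showid = ? AND season = ?', (showid, season)).fetchone()
    return row[0] == len(episode_numbers)


assert is_season_pack(1001, 1, list(range(1, 11)))   # all ten episodes named -> pack
assert not is_season_pack(1001, 1, [1, 2, 3])        # only a few episodes -> not a pack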
+from __future__ import unicode_literals + import re +import traceback from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -31,7 +34,7 @@ class TokyoToshokanProvider(TorrentProvider): # pylint: disable=too-many-instan def __init__(self): - TorrentProvider.__init__(self, "TokyoToshokan") + TorrentProvider.__init__(self, 'TokyoToshokan') self.public = True self.supports_absolute_numbering = True @@ -54,15 +57,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params = { - "terms": search_string, - "type": 1, # get anime types + 'terms': search_string, + 'type': 1, # get anime types } data = self.get_url(self.urls['search'], params=search_params, returns='text') @@ -75,7 +78,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Continue only if one Release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue a = 1 if len(torrent_rows[0]('td')) < 2 else 0 @@ -93,24 +96,26 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man sl = re.match(r'S:(?P\d+)L:(?P\d+)C:(?:\d+)ID:(?:\d+)', stats.replace(' ', '')) seeders = try_int(sl.group('seeders')) if sl else 0 leechers = try_int(sl.group('leechers')) if sl else 0 - except StandardError: - continue - if not all([title, download_url]): - continue + if not all([title, download_url]): + continue - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + (title, seeders), logger.DEBUG) + continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG) + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + if mode != 'RSS': + logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) - items.append(item) + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index 00d1a51fb3..cdc2eae814 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -21,6 +21,8 @@ from __future__ import unicode_literals import re +import traceback + from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -36,7 +38,7 @@ class TorrentBytesProvider(TorrentProvider): # pylint: disable=too-many-instanc def __init__(self): # Provider Init - TorrentProvider.__init__(self, "TorrentBytes") + TorrentProvider.__init__(self, 'TorrentBytes') # Credentials self.username = None @@ -48,14 +50,14 @@ def __init__(self): self.freeleech = False # URLs - self.url = "https://www.torrentbytes.net" + self.url = 'https://www.torrentbytes.net' self.urls = { - "login": urljoin(self.url, "takelogin.php"), - "search": urljoin(self.url, "browse.php") + 'login': urljoin(self.url, 'takelogin.php'), + 'search': urljoin(self.url, 'browse.php') } # Proper Strings - self.proper_strings = ["PROPER", "REPACK"] + self.proper_strings = ['PROPER', 'REPACK'] # Cache self.cache = tvcache.TVCache(self) @@ -64,17 +66,17 @@ def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True - login_params = {"username": self.username, - "password": self.password, - "login": "Log in!"} + login_params = {'username': self.username, + 'password': self.password, + 'login': 'Log in!'} - response = self.get_url(self.urls["login"], post_data=login_params, returns="text") + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log("Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False - if re.search("Username or password incorrect", response): - logger.log("Invalid username or password. Check your settings", logger.WARNING) + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -85,73 +87,74 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results search_params = { - "c41": 1, "c33": 1, "c38": 1, "c32": 1, "c37": 1 + 'c41': 1, 'c33': 1, 'c38': 1, 'c32': 1, 'c37': 1 } for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if mode != "RSS": - logger.log("Search string: {}".format(search_string.decode("utf-8")), - logger.DEBUG) + if mode != 'RSS': + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - search_params["search"] = search_string - data = self.get_url(self.urls["search"], params=search_params, returns="text") + search_params['search'] = search_string + data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue - with BS4Parser(data, "html5lib") as html: - torrent_table = html.find("table", border="1") - torrent_rows = torrent_table("tr") if torrent_table else [] + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', border='1') + torrent_rows = torrent_table('tr') if torrent_table else [] # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # "Type", "Name", Files", "Comm.", "Added", "TTL", "Size", "Snatched", "Seeders", "Leechers" - labels = [label.get_text(strip=True) for label in torrent_rows[0]("td")] + labels = [label.get_text(strip=True) for label in torrent_rows[0]('td')] for result in torrent_rows[1:]: try: - cells = result("td") + cells = result('td') - download_url = urljoin(self.url, cells[labels.index("Name")].find("a", href=re.compile(r"download.php\?id="))["href"]) - title_element = cells[labels.index("Name")].find("a", href=re.compile(r"details.php\?id=")) - title = title_element.get("title", "") or title_element.get_text(strip=True) + download_url = urljoin(self.url, cells[labels.index('Name')].find('a', href=re.compile(r'download.php\?id='))['href']) + title_element = cells[labels.index('Name')].find('a', href=re.compile(r'details.php\?id=')) + title = title_element.get('title', '') or title_element.get_text(strip=True) if not all([title, download_url]): continue if self.freeleech: # Free leech torrents are marked with green [F L] in the title (i.e. [F L]) - freeleech = cells[labels.index("Name")].find("font", color="green") - if not freeleech or freeleech.get_text(strip=True) != "[F\xa0L]": + freeleech = cells[labels.index('Name')].find('font', color='green') + if not freeleech or freeleech.get_text(strip=True) != '[F\xa0L]': continue - seeders = try_int(cells[labels.index("Seeders")].get_text(strip=True)) - leechers = try_int(cells[labels.index("Leechers")].get_text(strip=True)) + seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True)) + leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): - if mode != "RSS": + if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. 
Seeders: {1})".format (title, seeders), logger.DEBUG) continue # Need size for failed downloads handling - torrent_size = cells[labels.index("Size")].get_text(strip=True) + torrent_size = cells[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size) or -1 item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except (AttributeError, TypeError): + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index a40958def1..ff04501530 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -18,7 +18,10 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals + import re +import traceback from requests.compat import urljoin from requests.exceptions import RequestException from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar @@ -34,7 +37,7 @@ class TorrentDayProvider(TorrentProvider): # pylint: disable=too-many-instance- def __init__(self): # Provider Init - TorrentProvider.__init__(self, "TorrentDay") + TorrentProvider.__init__(self, 'TorrentDay') # Credentials self.username = None @@ -79,11 +82,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('You tried too often', response): - logger.log(u"Too many login access attempts", logger.WARNING) + logger.log('Too many login access attempts', logger.WARNING) return False try: @@ -96,7 +99,7 @@ def login(self): except Exception: pass - logger.log(u"Unable to obtain cookie", logger.WARNING) + logger.log('Unable to obtain cookie', logger.WARNING) return False def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals @@ -106,11 +109,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_params[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_string = '+'.join(search_string.split()) @@ -131,40 +134,45 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many try: jdata = response.json() except ValueError: # also catches JSONDecodeError if simplejson is installed - logger.log(u"Data returned from provider is not json", logger.ERROR) + logger.log('Data returned from provider is not json', logger.ERROR) continue torrents = jdata.get('Fs', [dict()])[0].get('Cn', {}).get('torrents', []) if not torrents: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not 
contain any torrents', logger.DEBUG) continue for torrent in torrents: - title = re.sub(r"\[.*\=.*\].*\[/.*\]", "", torrent['name']) if torrent['name'] else None - download_url = urljoin(self.urls['download'], '{}/{}'.format(torrent['id'], torrent['fname'])) if torrent['id'] and torrent['fname'] else None - - if not all([title, download_url]): - continue + try: + title = re.sub(r'\[.*\=.*\].*\[/.*\]', '', torrent['name']) if torrent['name'] else None + download_url = urljoin(self.urls['download'], '{}/{}'.format(torrent['id'], torrent['fname'])) if torrent['id'] and torrent['fname'] else None - seeders = int(torrent['seed']) if torrent['seed'] else 1 - leechers = int(torrent['leech']) if torrent['leech'] else 0 + if not all([title, download_url]): + continue - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) - continue + seeders = int(torrent['seed']) if torrent['seed'] else 1 + leechers = int(torrent['leech']) if torrent['leech'] else 0 - torrent_size = torrent['size'] - size = convert_size(torrent_size) or -1 + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) + continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + torrent_size = torrent['size'] + size = convert_size(torrent_size) or -1 - if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format - (title, seeders, leechers), logger.DEBUG) + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - items.append(item) + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index 496cb68883..ba9c9c3aa9 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -21,6 +21,7 @@ from __future__ import unicode_literals import re +import traceback from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -36,7 +37,7 @@ class TorrentLeechProvider(TorrentProvider): # pylint: disable=too-many-instanc def __init__(self): # Provider Init - TorrentProvider.__init__(self, "TorrentLeech") + TorrentProvider.__init__(self, 'TorrentLeech') # Credentials self.username = None @@ -47,14 +48,14 @@ def __init__(self): self.minleech = None # URLs - self.url = "https://torrentleech.org" + self.url = 'https://torrentleech.org' self.urls = { - "login": urljoin(self.url, "user/account/login/"), - "search": urljoin(self.url, "torrents/browse"), + 'login': urljoin(self.url, 'user/account/login/'), + 'search': urljoin(self.url, 'torrents/browse'), } # Proper Strings - self.proper_strings = ["PROPER", "REPACK"] + self.proper_strings = ['PROPER', 'REPACK'] # Cache self.cache = tvcache.TVCache(self) @@ -64,19 +65,19 @@ def login(self): return True login_params = { - "username": self.username.encode("utf-8"), - "password": self.password.encode("utf-8"), - "login": "submit", - "remember_me": "on", + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'remember_me': 'on', } - response = self.get_url(self.urls["login"], post_data=login_params, returns="text") + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log("Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False - if re.search("Invalid Username/password", response) or re.search("Login :: TorrentLeech.org", response): - logger.log("Invalid username or password. Check your settings", logger.WARNING) + if re.search('Invalid Username/password', response) or re.search('Login :: TorrentLeech.org', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) return False return True @@ -90,83 +91,85 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # 2,26,27,32,7,34,35 # Units - units = ["B", "KB", "MB", "GB", "TB", "PB"] + units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] def process_column_header(td): - result = "" + result = '' if td.a: - result = td.a.get("title") + result = td.a.get('title') if not result: result = td.get_text(strip=True) return result for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if mode != "RSS": - logger.log("Search string: {}".format(search_string.decode("utf-8")), + if mode != 'RSS': + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - categories = ["2", "7", "35"] - categories += ["26", "32"] if mode == "Episode" else ["27"] + categories = ['2', '7', '35'] + categories += ['26', '32'] if mode == 'Episode' else ['27'] if self.show and self.show.is_anime: - categories += ["34"] + categories += ['34'] else: - categories = ["2", "26", "27", "32", "7", "34", "35"] + categories = ['2', '26', '27', '32', '7', '34', '35'] search_params = { - "categories": ",".join(categories), - "query": search_string + 'categories': ','.join(categories), + 'query': search_string } - data = self.get_url(self.urls["search"], params=search_params, returns="text") + data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue - with BS4Parser(data, "html5lib") as html: - torrent_table = html.find("table", id="torrenttable") - torrent_rows = torrent_table("tr") if torrent_table else [] + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', id='torrenttable') + torrent_rows = torrent_table('tr') if torrent_table else [] # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue - labels = [process_column_header(label) for label in torrent_rows[0]("th")] + labels = [process_column_header(label) for label in torrent_rows[0]('th')] # Skip column headers for result in torrent_rows[1:]: try: - title = result.find("td", class_="name").find("a").get_text(strip=True) - download_url = urljoin(self.url, result.find("td", class_="quickdownload").find("a")["href"]) + title = result.find('td', class_='name').find('a').get_text(strip=True) + download_url = urljoin(self.url, result.find('td', class_='quickdownload').find('a')['href']) if not all([title, download_url]): continue - seeders = try_int(result.find("td", class_="seeders").get_text(strip=True)) - leechers = try_int(result.find("td", class_="leechers").get_text(strip=True)) + seeders = try_int(result.find('td', class_='seeders').get_text(strip=True)) + leechers = try_int(result.find('td', class_='leechers').get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): - if mode != "RSS": + if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" " minimum seeders: {0}. 
Seeders: {1})".format (title, seeders), logger.DEBUG) continue - torrent_size = result("td")[labels.index("Size")].get_text() + torrent_size = result('td')[labels.index('Size')].get_text() size = convert_size(torrent_size, units=units) or -1 item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != "RSS": - logger.log("Found result: {0} with {1} seeders and {2} leechers".format + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py index c9cad22098..eca4cbcebb 100644 --- a/sickbeard/providers/torrentz.py +++ b/sickbeard/providers/torrentz.py @@ -16,6 +16,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals + import re import traceback @@ -66,25 +68,25 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: search_url = self.urls['verified'] if self.confirmed else self.urls['feed'] if mode != 'RSS': - logger.log(u"Search string: {}".format - (search_string.decode("utf-8")), logger.DEBUG) + logger.log('Search string: {0}'.format + (search_string), logger.DEBUG) data = self.get_url(search_url, params={'q': search_string}, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log("No data returned from provider", logger.DEBUG) continue if not data.startswith(". +from __future__ import unicode_literals + import re import traceback from requests.utils import dict_from_cookiejar @@ -25,7 +27,7 @@ from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser -from sickrage.helper.common import convert_size, try_int +from sickrage.helper.common import try_int from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -35,7 +37,7 @@ class TransmitTheNetProvider(TorrentProvider): # pylint: disable=too-many-insta def __init__(self): # Provider Init - TorrentProvider.__init__(self, "TransmitTheNet") + TorrentProvider.__init__(self, 'TransmitTheNet') # Credentials self.username = None @@ -61,7 +63,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.") + raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) return True @@ -78,11 +80,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if re.search('Username Incorrect', response) or re.search('Password Incorrect', response): - logger.log(u"Invalid username or password. 
Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return False return True @@ -97,14 +99,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params = { 'searchtext': search_string, 'filter_freeleech': (0, 1)[self.freeleech is True], 'order_by': ('seeders', 'time')[mode == 'RSS'], - "order_way": "desc" + 'order_way': 'desc' } if not search_string: @@ -112,67 +114,73 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue try: with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', {'id': 'torrent_table'}) if not torrent_table: - logger.log(u"Data returned from %s does not contain any torrents" % self.name, logger.DEBUG) + logger.log('Data returned from %s does not contain any torrents' % self.name, logger.DEBUG) continue torrent_rows = torrent_table('tr', {'class': 'torrent'}) # Continue only if one Release is found if not torrent_rows: - logger.log(u"Data returned from %s does not contain any torrents" % self.name, logger.DEBUG) + logger.log('Data returned from %s does not contain any torrents' % self.name, logger.DEBUG) continue for torrent_row in torrent_rows: - freeleech = torrent_row.find('img', alt="Freeleech") is not None - if self.freeleech and not freeleech: - continue - - download_item = torrent_row.find('a', {'title': [ - 'Download Torrent', # Download link - 'Previously Grabbed Torrent File', # Already Downloaded - 'Currently Seeding Torrent', # Seeding - 'Currently Leeching Torrent', # Leeching - ]}) - - if not download_item: - continue - - download_url = urljoin(self.url, download_item['href']) - - temp_anchor = torrent_row.find('a', {"data-src": True}) - title = temp_anchor['data-src'].rsplit('.', 1)[0] - if not all([title, download_url]): - continue - - cells = torrent_row('td') - seeders = try_int(cells[8].text.strip()) - leechers = try_int(cells[9].text.strip()) - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): + try: + freeleech = torrent_row.find('img', alt='Freeleech') is not None + if self.freeleech and not freeleech: + continue + + download_item = torrent_row.find('a', {'title': [ + 'Download Torrent', # Download link + 'Previously Grabbed Torrent File', # Already Downloaded + 'Currently Seeding Torrent', # Seeding + 'Currently Leeching Torrent', # Leeching + ]}) + + if not download_item: + continue + + download_url = urljoin(self.url, download_item['href']) + + temp_anchor = torrent_row.find('a', {'data-src': True}) + title = temp_anchor['data-src'].rsplit('.', 1)[0] + if not all([title, download_url]): + continue + + cells = torrent_row('td') + seeders = try_int(cells[8].text.strip()) + leechers = try_int(cells[9].text.strip()) + + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the" + " minimum seeders: {0}. 
Seeders: {1})".format + (title, seeders), logger.DEBUG) + continue + + size = temp_anchor['data-filesize'] or -1 + + item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the" - u" minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) - size = temp_anchor['data-filesize'] or -1 - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format - (title, seeders, leechers), logger.DEBUG) - - items.append(item) + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR) + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) results += items diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index 0e1006e916..b36100dda7 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -17,6 +17,7 @@ from __future__ import unicode_literals import re +import traceback from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -95,15 +96,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode == 'Season': - search_string = re.sub(ur'(.*)S0?', ur'\1Series ', search_string) + search_string = re.sub(r'(.*)S0?', r'\1Series ', search_string) if mode != 'RSS': - logger.log('Search string: {}'.format(search_string), logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['keywords'] = search_string data = self.get_url(self.urls['search'], post_data=search_params, returns='text') @@ -144,17 +145,17 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Chop off tracker/channel prefix or we cant parse the result! if mode != 'RSS' and search_params['keywords']: - show_name_first_word = re.search(ur'^[^ .]+', search_params['keywords']).group() + show_name_first_word = re.search(r'^[^ .]+', search_params['keywords']).group() if not title.startswith(show_name_first_word): - title = re.sub(ur'.*(' + show_name_first_word + '.*)', ur'\1', title) + title = re.sub(r'.*(' + show_name_first_word + '.*)', r'\1', title) # Change title from Series to Season, or we can't parse if mode == 'Season': - title = re.sub(ur'(.*)(?i)Series', ur'\1Season', title) + title = re.sub(r'(.*)(?i)Series', r'\1Season', title) # Strip year from the end or we can't parse it! - title = re.sub(ur'(.*)[\. ]?\(\d{4}\)', ur'\1', title) - title = re.sub(ur'\s+', ur' ', title) + title = re.sub(r'(.*)[\. 
]?\(\d{4}\)', r'\1', title) + title = re.sub(r'\s+', r' ', title) torrent_size = torrent('td')[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size, units=units) or -1 @@ -165,7 +166,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man item = {'title': title + '.hdtv.x264', 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py index 7158e3da80..ec288d456d 100644 --- a/sickbeard/providers/womble.py +++ b/sickbeard/providers/womble.py @@ -31,7 +31,7 @@ class WombleProvider(NZBProvider): def __init__(self): - NZBProvider.__init__(self, 'Womble\'s Index') + NZBProvider.__init__(self, "Womble's Index") self.public = True diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py index e3e7c4a2be..3bc1849961 100644 --- a/sickbeard/providers/xthor.py +++ b/sickbeard/providers/xthor.py @@ -18,7 +18,10 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals + import re +import traceback from requests.utils import dict_from_cookiejar @@ -34,7 +37,7 @@ class XthorProvider(TorrentProvider): # pylint: disable=too-many-instance-attri def __init__(self): # Provider Init - TorrentProvider.__init__(self, "Xthor") + TorrentProvider.__init__(self, 'Xthor') # Credentials self.username = None @@ -70,11 +73,11 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u"Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False if not re.search('donate.php', response): - logger.log(u"Invalid username or password. Check your settings", logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) return False return True @@ -117,7 +120,7 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) # Sorting: 1: Name, 3: Comments, 5: Size, 6: Completed, 7: Seeders, 8: Leechers (4: Time ?) 
search_params['sort'] = (7, 4)[mode == 'RSS'] @@ -125,25 +128,25 @@ def process_column_header(td): for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format - (search_string.decode("utf-8")), logger.DEBUG) + logger.log('Search string: {0}'.format + (search_string), logger.DEBUG) search_params['search'] = search_string data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log(u"No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find("table", class_="table2 table-bordered2") + torrent_table = html.find('table', class_='table2 table-bordered2') torrent_rows = [] if torrent_table: - torrent_rows = torrent_table("tr") + torrent_rows = torrent_table('tr') # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # Catégorie, Nom du Torrent, (Download), (Bookmark), Com., Taille, Compl�t�, Seeders, Leechers @@ -157,7 +160,7 @@ def process_column_header(td): try: title = cells[labels.index('Nom du Torrent')].get_text(strip=True) - download_url = self.url + '/' + row.find("a", href=re.compile("download.php"))['href'] + download_url = self.url + '/' + row.find('a', href=re.compile('download.php'))['href'] if not all([title, download_url]): continue @@ -167,8 +170,8 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the" - u" minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + " minimum seeders: {0}. Seeders: {1})".format (title, seeders), logger.DEBUG) continue @@ -177,11 +180,13 @@ def process_column_header(td): item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} if mode != 'RSS': - logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue results += items diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py index 775be50790..868ebf7f42 100644 --- a/sickbeard/providers/zooqle.py +++ b/sickbeard/providers/zooqle.py @@ -18,6 +18,7 @@ from __future__ import unicode_literals +import traceback from requests.compat import urljoin from sickbeard import logger, tvcache @@ -141,7 +142,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders, leechers), logger.DEBUG) items.append(item) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue # For each search mode sort all the items by seeders if available diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 96f62875e5..1c48a944a5 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -16,6 +16,7 @@ # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals import re import sickbeard @@ -97,21 +98,21 @@ def download_result(self, result): 'Referer': '/'.join(url.split('/')[:3]) + '/' }) - logger.log(u'Downloading a result from %s at %s' % (self.name, url)) + logger.log('Downloading a result from %s at %s' % (self.name, url)) if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB): filename = replace_extension(filename, GenericProvider.TORRENT) if download_file(url, filename, session=self.session, headers=self.headers, hooks={'response': self.get_url_hook}): if self._verify_download(filename): - logger.log(u'Saved result to %s' % filename, logger.INFO) + logger.log('Saved result to %s' % filename, logger.INFO) return True - logger.log(u'Could not download %s' % url, logger.WARNING) + logger.log('Could not download %s' % url, logger.WARNING) remove_file_failed(filename) if urls: - logger.log(u'Failed to download any results', logger.WARNING) + logger.log('Failed to download any results', logger.WARNING) return False @@ -198,13 +199,13 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, if search_mode == 'sponly': if parse_result.episode_numbers: logger.log( - u'This is supposed to be a season pack search but the result %s is not a valid season pack, skipping it' % title, + 'This is supposed to be a season pack search but the result %s is not a valid season pack, skipping it' % title, logger.DEBUG ) add_cache_entry = True elif not [ep for ep in episodes if parse_result.season_number == (ep.season, ep.scene_season)[ep.show.is_scene]]: logger.log( - u'This season result %s is for a season we are not searching for, skipping it' % title, + 'This season result %s is for a season we are not searching for, skipping it' % title, logger.DEBUG ) add_cache_entry = True @@ -219,7 +220,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, ]): logger.log( - u'The result %s doesn\'t seem to match an episode that we are currently trying to snatch, skipping it' % title, + 'The result %s doesn\'t seem to match an episode that we are currently trying to snatch, skipping it' % title, logger.DEBUG) add_cache_entry = True @@ -231,7 +232,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, if not parse_result.is_air_by_date: logger.log( - u'This is supposed to be a date search but the result %s didn\'t parse as one, skipping it' % title, + 'This is supposed to be a date search but the result %s didn\'t parse as one, skipping it' % title, logger.DEBUG) add_cache_entry = True else: @@ -253,7 +254,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, same_day_special = True elif len(sql_results) != 1: logger.log( - u'Tried to look up the date for the episode %s but the database didn\'t give proper results, skipping it' % title, + 'Tried to look up the date for the episode %s but the database didn\'t give proper results, skipping it' % title, logger.WARNING) add_cache_entry = True @@ -265,7 +266,7 @@ def 
find_search_results(self, show, episodes, search_mode, forced_search=False, actual_episodes = parse_result.episode_numbers if add_cache_entry: - logger.log(u'Adding item from search to cache: %s' % title, logger.DEBUG) + logger.log('Adding item from search to cache: %s' % title, logger.DEBUG) # pylint: disable=protected-access # Access to a protected member of a client class ci = self.cache._addCacheEntry(title, url, seeders, leechers, size, pubdate, torrent_hash, parse_result=parse_result) @@ -285,10 +286,10 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, break if not episode_wanted: - logger.log(u'Ignoring result %s.' % (title), logger.DEBUG) + logger.log('Ignoring result %s.' % (title), logger.DEBUG) continue - logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG) + logger.log('Found result %s at %s' % (title, url), logger.DEBUG) episode_object = [] for current_episode in actual_episodes: @@ -310,13 +311,13 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, if not episode_object: episode_number = SEASON_RESULT - logger.log(u'Separating full season result to check for later', logger.DEBUG) + logger.log('Separating full season result to check for later', logger.DEBUG) elif len(episode_object) == 1: episode_number = episode_object[0].episode - logger.log(u'Single episode result.', logger.DEBUG) + logger.log('Single episode result.', logger.DEBUG) else: episode_number = MULTI_EP_RESULT - logger.log(u'Separating multi-episode result to check for later - result contains episodes: %s' % str( + logger.log('Separating multi-episode result to check for later - result contains episodes: %s' % str( parse_result.episode_numbers), logger.DEBUG) if episode_number not in results: @@ -349,11 +350,11 @@ def get_result(self, episodes): @staticmethod def get_url_hook(response, **kwargs): - logger.log(u'{} URL: {} [Status: {}]'.format + logger.log('{} URL: {} [Status: {}]'.format (response.request.method, response.request.url, response.status_code), logger.DEBUG) if response.request.method == 'POST': - logger.log(u'With post data: {}'.format(response.request.body), logger.DEBUG) + logger.log('With post data: {}'.format(response.request.body), logger.DEBUG) def get_url(self, url, post_data=None, params=None, timeout=30, **kwargs): # pylint: disable=too-many-arguments, kwargs['hooks'] = {'response': self.get_url_hook} @@ -421,7 +422,7 @@ def _get_episode_search_strings(self, episode, add_string=''): if add_string: episode_string += ' ' + add_string - search_string['Episode'].append(episode_string.encode('utf-8').strip()) + search_string['Episode'].append(episode_string.strip()) return [search_string] @@ -440,7 +441,7 @@ def _get_season_search_strings(self, episode): else: episode_string += 'S%02d' % int(episode.scene_season) - search_string['Season'].append(episode_string.encode('utf-8').strip()) + search_string['Season'].append(episode_string.strip()) return [search_string] @@ -475,7 +476,7 @@ def _get_title_and_url(self, item): # pylint: disable=no-self-use url = item.get('link', '') if title: - title = u'' + title.replace(' ', '.') + title = title.replace(' ', '.') else: title = '' @@ -491,7 +492,7 @@ def _make_url(self, result): return '', '' urls = [] - filename = u'' + filename = '' if result.url.startswith('magnet'): try: @@ -506,12 +507,12 @@ def _make_url(self, result): torrent_hash = b16encode(b32decode(torrent_hash)).upper() if not torrent_hash: - logger.log(u'Unable to extract torrent hash from magnet: %s' % 
ex(result.url), logger.ERROR) + logger.log('Unable to extract torrent hash from magnet: %s' % ex(result.url), logger.ERROR) return urls, filename urls = [x.format(torrent_hash=torrent_hash, torrent_name=torrent_name) for x in self.bt_cache_urls] except Exception: - logger.log(u'Unable to extract torrent hash or name from magnet: %s' % ex(result.url), logger.ERROR) + logger.log('Unable to extract torrent hash or name from magnet: %s' % ex(result.url), logger.ERROR) return urls, filename else: urls = [result.url] From a74e35d4e5d92e9b91f96ea138a5313d5b888826 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 10 Jun 2016 14:05:21 +0200 Subject: [PATCH 028/134] Next 11 providers --- sickbeard/providers/alpharatio.py | 2 +- sickbeard/providers/ilovetorrents.py | 80 +++++++++++-------- sickbeard/providers/iptorrents.py | 110 +++++++++++++------------- sickbeard/providers/kat.py | 31 +++++--- sickbeard/providers/morethantv.py | 25 +++--- sickbeard/providers/newpct.py | 28 ++++--- sickbeard/providers/newznab.py | 36 ++++++--- sickbeard/providers/norbits.py | 25 +++--- sickbeard/providers/nyaatorrents.py | 58 ++++++++------ sickbeard/providers/omgwtfnzbs.py | 6 +- sickbeard/providers/pretome.py | 114 ++++++++++++++------------- 11 files changed, 294 insertions(+), 221 deletions(-) diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index e75cd9530a..a66ed392a9 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -156,7 +156,7 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/ilovetorrents.py b/sickbeard/providers/ilovetorrents.py index b49b9e88ef..87e9f203b3 100644 --- a/sickbeard/providers/ilovetorrents.py +++ b/sickbeard/providers/ilovetorrents.py @@ -3,25 +3,26 @@ # # URL: https://sickrage.github.io # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals import re import traceback + from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -43,9 +44,8 @@ def __init__(self): self.url = 'https://www.ilovetorrents.me/' self.urls = { 'login': urljoin(self.url, 'takelogin.php'), - 'detail': urljoin(self.url, 'details.php?id=%s'), 'search': urljoin(self.url, 'browse.php'), - 'download': urljoin(self.url, '%s'), + 'download': urljoin(self.url, '{link}'), } # Credentials @@ -94,16 +94,18 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man results = [] if not self.login(): return results + search_params = { 'cat': 0 } + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format - (search_string), logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_params['search'] = search_string @@ -111,52 +113,62 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not data: continue - try: - with BS4Parser(data, 'html.parser') as html: - torrent_table = html.find('table', class_='koptekst') - torrent_rows = torrent_table('tr') if torrent_table else [] + with BS4Parser(data, 'html.parser') as html: + torrent_table = html.find('table', class_='koptekst') + torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if one Release is found - if len(torrent_rows) < 2: - logger.log(u'Data returned from provider does not contain any torrents', logger.DEBUG) - continue + # Continue only if one Release is found + if len(torrent_rows) < 2: + logger.log(u'Data returned from provider does not contain any torrents', logger.DEBUG) + continue - for result in torrent_rows[1:]: + for result in torrent_rows[1:]: + try: cells = result('td') - link = cells[1].find('a') - download_url = self.urls['download'] % cells[2].find('a')['href'] - - try: - title = link.getText() - seeders = int(cells[10].getText().replace(',', '')) - leechers = int(cells[11].getText().replace(',', '')) - torrent_size = cells[8].getText() - size = convert_size(torrent_size) or -1 - except (AttributeError, TypeError): - continue + + download_url = self.urls['download'].format(link=cells[2].find('a')['href']) + title = link.getText() if not all([title, download_url]): continue + seeders = int(cells[10].getText().replace(',', '')) + leechers = int(cells[11].getText().replace(',', '')) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log(u"Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue + # Use same failsafe as Bitsoup if seeders >= 32768 or leechers >= 32768: continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + torrent_size = cells[8].getText() + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log(u'Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - - except Exception: - logger.log(u'Failed parsing provider. Traceback: {0}'.format(traceback.format_exc()), logger.WARNING) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index 6adf97079b..cb487bf83d 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: seedboy # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals @@ -112,58 +110,64 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if not data: continue - try: - data = re.sub(r'(?im)', '', data, 0) - with BS4Parser(data, 'html5lib') as html: - if not html: - logger.log('No data returned from provider', logger.DEBUG) - continue - - if html.find(text='No Torrents Found!'): - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue - - torrent_table = html.find('table', attrs={'class': 'torrents'}) - torrents = torrent_table('tr') if torrent_table else [] + data = re.sub(r'(?im)', '', data, 0) + with BS4Parser(data, 'html5lib') as html: + if not html: + logger.log('No data returned from provider', logger.DEBUG) + continue + + if html.find(text='No Torrents Found!'): + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue + + torrent_table = html.find('table', attrs={'class': 'torrents'}) + torrents = torrent_table('tr') if torrent_table else [] + + # Continue only if one release is found + if len(torrents) < 2: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue + + for result in torrents[1:]: + try: + title = result('td')[1].find('a').text + download_url = self.urls['base_url'] + result('td')[3].find('a')['href'] + if not all([title, download_url]): + continue - # Continue only if one Release is found - if len(torrents) < 2: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue + seeders = int(result.find('td', attrs={'class': 'ac t_seeders'}).text) + leechers = int(result.find('td', attrs={'class': 'ac t_leechers'}).text) - for result in torrents[1:]: - try: - title = result('td')[1].find('a').text - download_url = self.urls['base_url'] + result('td')[3].find('a')['href'] - seeders = int(result.find('td', attrs={'class': 'ac t_seeders'}).text) - leechers = int(result.find('td', attrs={'class': 'ac t_leechers'}).text) - torrent_size = result('td')[5].text - size = convert_size(torrent_size) or -1 - - if not all([title, download_url]): - continue - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, - 'leechers': leechers, 'pubdate': None, 'hash': None} + # Filter unseeded torrent + if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) - - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format + (title, seeders), logger.DEBUG) continue - except Exception: - logger.log('Failed parsing provider. 
Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + torrent_size = result('td')[5].text + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue + results += items return results diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index 10b6fc6cc3..118fcf55fd 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -1,25 +1,26 @@ # coding=utf-8 # Author: Dustyn Gibson # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import traceback import validators + from requests.compat import urljoin from sickbeard.bs4_parser import BS4Parser @@ -64,6 +65,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: search_params['q'] = search_string if mode != 'RSS' else '' @@ -110,27 +112,36 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue verified = bool(try_int(item.find('torrent:verified').get_text(strip=True))) if self.confirmed and not verified: if mode != 'RSS': - logger.log("Found result {0} but that doesn't seem like a verified result so I'm ignoring it".format(title), logger.DEBUG) + logger.log("Found result {0} but that doesn't seem like a verified" + " result so I'm ignoring it".format(title), logger.DEBUG) continue torrent_size = item.find('torrent:contentlength').get_text(strip=True) size = convert_size(torrent_size) or -1 info_hash = item.find('torrent:infohash').get_text(strip=True) - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, - 'leechers': leechers, 'pubdate': None, 'hash': info_hash} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': info_hash + } if mode != 'RSS': - logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py index 1ed625b713..bdab9316c4 100644 --- a/sickbeard/providers/morethantv.py +++ b/sickbeard/providers/morethantv.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -68,7 +66,8 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True @@ -175,8 +174,15 @@ def process_column_header(td): torrent_size = cells[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, - 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) @@ -186,6 +192,7 @@ def process_column_header(td): logger.log('Failed parsing provider. 
Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) continue + results += items return results diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py index 5a980739d4..dba63d9ace 100644 --- a/sickbeard/providers/newpct.py +++ b/sickbeard/providers/newpct.py @@ -1,29 +1,29 @@ # coding=utf-8 # Author: CristianBB # Greetings to Mr. Pine-apple - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals -from requests.compat import urljoin import re import traceback +from requests.compat import urljoin + from sickbeard import helpers from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -117,11 +117,20 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = 1 leechers = 0 torrent_size = cells[labels.index('Tamaño')].get_text(strip=True) - size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log('Found result: {0}'.format(title), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): @@ -210,4 +219,5 @@ def _processTitle(title): return title.strip() + provider = newpctProvider() diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index a13766e0ef..aed8770c14 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -1,31 +1,33 @@ # coding=utf-8 # Author: Nic Wolfe # Rewrite: Dustyn Gibson (miigotu) - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals -from requests.compat import urljoin + import os import re import time import validators - import sickbeard +import traceback + +from requests.compat import urljoin + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser from sickbeard.common import cpu_presets @@ -367,9 +369,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man size = convert_size(item_size) or -1 - result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + result = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + items.append(result) - except StandardError: + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue # Since we arent using the search string, @@ -377,8 +393,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if 'tvdbid' in search_params: break - if torznab: - results.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py index 245909a8b9..7d12bcaaa3 100644 --- a/sickbeard/providers/norbits.py +++ b/sickbeard/providers/norbits.py @@ -1,22 +1,19 @@ # coding=utf-8 -"""A Norbits (https://norbits.net) provider""" - -# URL: https://sickrage.github.io # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals @@ -125,10 +122,18 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many info_hash = item.pop('info_hash', '') size = convert_size(item.pop('size', -1), -1) - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': info_hash} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': info_hash + } if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format( - title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index cd365c9c33..f7b8b82725 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: Mr_Orange # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -54,24 +52,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if self.show and not self.show.is_anime: return results + search_params = { + 'page': 'rss', + 'cats': '1_0', # All anime + 'sort': 2, # Sort Descending By Seeders + 'order': 1 + } + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format - (search_string), logger.DEBUG) - - search_params = { - 'page': 'rss', - 'cats': '1_0', # All anime - 'sort': 2, # Sort Descending By Seeders - 'order': 1 - } - if mode != 'RSS': + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + search_params['term'] = search_string - results = [] data = self.cache.getRSSFeed(self.url, params=search_params)['entries'] if not data: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) @@ -95,27 +92,36 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Discarding torrent because it doesn\'t meet the' - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue if self.confirmed and not verified and mode != 'RSS': - logger.log("Found result {0} but that doesn't seem like a verified result so I'm ignoring it".format - (title), logger.DEBUG) + logger.log("Found result {0} but that doesn't seem like a verified" + " result so I'm ignoring it".format(title), logger.DEBUG) continue size = convert_size(torrent_size) or -1 - result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) - items.append(result) + items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index 5778989066..315efd8bb5 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -1,7 +1,6 @@ # coding=utf-8 # Author: Jordon Smith # -# # This file is part of Medusa. # # Medusa is free software: you can redistribute it and/or modify @@ -21,8 +20,8 @@ import re import traceback - import sickbeard + from sickbeard import logger, tvcache from sickrage.helper.common import convert_size, try_int @@ -30,6 +29,7 @@ class OmgwtfnzbsProvider(NZBProvider): + def __init__(self): NZBProvider.__init__(self, 'OMGWTFNZBs') @@ -106,6 +106,7 @@ def search(self, search_strings, age=0, ep_obj=None): for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: search_params['search'] = search_string if mode != 'RSS': @@ -158,4 +159,5 @@ def _getRSSData(self): } return self.getRSSFeed(self.provider.urls['rss'], params=search_params) + provider = OmgwtfnzbsProvider() diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index 591d385cb2..b4ff4da0c2 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -1,27 +1,26 @@ # coding=utf-8 # Author: Nick Sologoub # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals import re import traceback + from requests.compat import quote from requests.utils import dict_from_cookiejar @@ -104,68 +103,71 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if not data: continue - try: - with BS4Parser(data, 'html5lib') as html: - # Continue only if one Release is found - empty = html.find('h2', text='No .torrents fit this filter criteria') - if empty: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue - - torrent_table = html.find('table', attrs={'style': 'border: none; width: 100%;'}) - if not torrent_table: - logger.log('Could not find table of torrents', logger.ERROR) - continue + with BS4Parser(data, 'html5lib') as html: + # Continue only if one Release is found + empty = html.find('h2', text='No .torrents fit this filter criteria') + if empty: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - torrent_rows = torrent_table('tr', attrs={'class': 'browse'}) + torrent_table = html.find('table', attrs={'style': 'border: none; width: 100%;'}) + if not torrent_table: + logger.log('Could not find table of torrents', logger.ERROR) + continue - for result in torrent_rows: - try: - cells = result('td') - size = None - link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'}) + torrent_rows = torrent_table('tr', attrs={'class': 'browse'}) - torrent_id = link['href'].replace('details.php?id=', '') + for result in torrent_rows: + try: + cells = result('td') + size = None + link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'}) - if link.get('title', ''): - title = link['title'] - else: - title = link.contents[0] + torrent_id = link['href'].replace('details.php?id=', '') - download_url = self.urls['download'] % (torrent_id, link.contents[0]) - seeders = int(cells[9].contents[0]) - leechers = int(cells[10].contents[0]) + if link.get('title', ''): + title = link['title'] + else: + title = link.contents[0] - # Need size for failed downloads handling - if size is None: - torrent_size = cells[7].text - size = convert_size(torrent_size) or -1 + download_url = self.urls['download'] % (torrent_id, link.contents[0]) + if not all([title, download_url]): + continue - if not all([title, download_url]): - continue + seeders = int(cells[9].contents[0]) + leechers = int(cells[10].contents[0]) - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue + # Need size for failed downloads handling + if size is None: + torrent_size = cells[7].text + size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': - seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + # Filter unseeded torrent + if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) - - items.append(item) - - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format + (title, seeders), logger.DEBUG) continue - except Exception: - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items From f9a4213c07c45a940f821247255a37d6c8ab5106 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 10 Jun 2016 14:54:31 +0200 Subject: [PATCH 029/134] Next 11 providers, removed sceneelite --- sickbeard/providers/newznab.py | 4 +- sickbeard/providers/rarbg.py | 34 ++++-- sickbeard/providers/rsstorrent.py | 26 ++-- sickbeard/providers/scc.py | 70 +++++------ sickbeard/providers/sceneelite.py | 153 ------------------------ sickbeard/providers/scenetime.py | 40 ++++--- sickbeard/providers/shazbat.py | 12 +- sickbeard/providers/speedcd.py | 28 +++-- sickbeard/providers/t411.py | 110 +++++++++-------- sickbeard/providers/thepiratebay.py | 27 +++-- sickbeard/providers/tntvillage.py | 172 ++++++++++++++------------- sickbeard/providers/tokyotoshokan.py | 39 +++--- sickbeard/providers/torrentbytes.py | 35 ++++-- 13 files changed, 334 insertions(+), 416 deletions(-) delete mode 100644 sickbeard/providers/sceneelite.py diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index aed8770c14..ef0d8ca50c 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -369,7 +369,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man size = convert_size(item_size) or -1 - result = { + item = { 'title': title, 'link': download_url, 'size': size, @@ -382,7 +382,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) - items.append(result) + items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index e2915554d1..4b29b63eab 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -1,20 +1,20 @@ # coding=utf-8 # Author: Dustyn Gibson # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals @@ -96,6 +96,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + if mode == 'RSS': search_params['sort'] = 'last' search_params['mode'] = 'list' @@ -157,10 +158,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = item.pop('seeders') leechers = item.pop('leechers') + if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1})".format + " minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -171,12 +173,24 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) - result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - items.append(result) + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index bfc43c1f48..d236a17e54 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -1,32 +1,32 @@ # coding=utf-8 # # Author: Mr_Orange # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals import io import os import re +import sickbeard + from requests.utils import add_dict_to_cookiejar + from bencode import bdecode -import sickbeard from sickbeard import helpers, logger, tvcache from sickrage.helper.encoding import ek @@ -189,18 +189,18 @@ def validateRSS(self): # pylint: disable=too-many-return-statements @staticmethod def dumpHTML(data): - dumpName = ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html') + dump_name = ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html') try: - fileOut = io.open(dumpName, 'wb') - fileOut.write(data) - fileOut.close() - helpers.chmodAsParent(dumpName) + file_out = io.open(dump_name, 'wb') + file_out.write(data) + file_out.close() + helpers.chmodAsParent(dump_name) except IOError as error: logger.log('Unable to save the file: {0}'.format(ex(error)), logger.ERROR) return False - logger.log('Saved custom_torrent html dump {0} '.format(dumpName), logger.INFO) + logger.log('Saved custom_torrent html dump {0} '.format(dump_name), logger.INFO) return True diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index c58df97af1..588797f112 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -1,36 +1,32 @@ # coding=utf-8 # Author: Idan Gutman # Modified by jkaberg, https://github.com/jkaberg for SceneAccess - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import re import traceback -import time from requests.compat import urljoin, quote from requests.utils import dict_from_cookiejar -import sickbeard from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser -from sickbeard.common import cpu_presets from sickrage.helper.common import convert_size, try_int from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -99,21 +95,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - if mode != 'RSS': - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_url = self.urls['search'] % (quote(search_string), self.categories[mode]) - try: - data = self.get_url(search_url, returns='text') - time.sleep(cpu_presets[sickbeard.CPU_PRESET]) - except Exception as e: - logger.log('Unable to fetch data. 
Error: %s' % repr(e), logger.WARNING) - + data = self.get_url(search_url, returns='text') if not data: continue @@ -127,7 +117,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man continue for result in torrent_table('tr')[1:]: - try: link = result.find('td', attrs={'class': 'ttr_name'}).find('a') url = result.find('td', attrs={'class': 'td_dl'}).find('a') @@ -139,31 +128,42 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man with BS4Parser(data) as details_html: title = re.search("(?<=').+(? -# -# URL: https://sickrage.github.io -# -# This file is part of SickRage. -# -# SickRage is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SickRage is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . - -from __future__ import unicode_literals - -import traceback -from requests.compat import urljoin -from requests.utils import dict_from_cookiejar - -from sickbeard import logger, tvcache - -from sickrage.helper.common import try_int -from sickrage.providers.torrent.TorrentProvider import TorrentProvider - - -class SceneEliteProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - - def __init__(self): - - # Provider Init - TorrentProvider.__init__(self, 'SceneElite') - - # Credentials - self.username = None - self.password = None - - # Torrent Stats - self.minseed = None - self.minleech = None - self.freeleech = None - - # URLs - self.url = 'https://sceneelite.org/' - self.urls = { - 'login': urljoin(self.url, '/api/v1/auth'), - 'search': urljoin(self.url, '/api/v1/torrents'), - 'download': urljoin(self.url, '/api/v1/torrents/download/'), - } - - # Proper Strings - self.proper_strings = ['PROPER', 'REPACK', 'REAL'] - cache_params = {'RSS': ['']} - # Cache - self.cache = tvcache.TVCache(self, min_time=0.1, search_params=cache_params) - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password - } - - response = self.get_url(self.urls['login'], params=login_params, returns='json') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - return True - - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches - results = [] - if not self.login(): - return results - - # Search Params - search_params = { - 'extendedSearch': 'false', - 'hideOld': 'false', - 'index': '0', - 'limit': '100', - 'order': 'asc', - 'page': 'search', - 'sort': 'n', - 'categories[0]': 3, - 'categories[1]': 6, - 'categories[2]': 7 - } - - for mode in search_strings: - items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - - for search_string in search_strings[mode]: - if mode != 'RSS': - logger.log('Search string: {0}'.format - (search_string), logger.DEBUG) - search_params['searchText'] = search_string - else: - search_params['page'] = 'last_seriebrowse' - results = [] - search_url = self.urls['search'] - try: - jdata = self.get_url(search_url, 
params=search_params, returns='json') - except ValueError: - logger.log('No data returned from provider', logger.DEBUG) - continue - for torrent in jdata: - try: - title = torrent.pop('name', '') - id = str(torrent.pop('id', '')) - if not id: - continue - seeders = try_int(torrent.pop('seeders', ''), 1) - leechers = try_int(torrent.pop('leechers', ''), 0) - freeleech = torrent.pop('frileech') - if self.freeleech and freeleech != 1: - continue - size = try_int(torrent.pop('size', ''), 0) - download_url = self.urls['download'] + id - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - - if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) - - items.append(item) - - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue - - results += items - - return results - - -provider = SceneEliteProvider() diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index d3d95ec870..3dde79cb37 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -1,25 +1,24 @@ # coding=utf-8 # Author: Idan Gutman # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . -import re +from __future__ import unicode_literals +import re import traceback from requests.compat import quote @@ -118,26 +117,35 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many title = link.get_text(strip=True) download_url = self.urls['download'] % (torrent_id, '%s.torrent' % title.replace(' ', '.')) + if not all([title, download_url]): + continue seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True)) leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True)) - torrent_size = cells[labels.index('Size')].get_text() - - size = convert_size(torrent_size) or -1 - - if not all([title, download_url]): - continue # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + torrent_size = cells[labels.index('Size')].get_text() + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py index 8310513aa6..1b12c9da3d 100644 --- a/sickbeard/providers/shazbat.py +++ b/sickbeard/providers/shazbat.py @@ -1,28 +1,27 @@ # coding=utf-8 # Author: Nic Wolfe # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals from requests.compat import urljoin from sickbeard import logger, tvcache + from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -77,4 +76,5 @@ def _getRSSData(self): def _checkAuth(self, data): return self.provider._checkAuthFromData(data) # pylint: disable=protected-access + provider = ShazbatProvider() diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 2e78ad7755..9539696276 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals @@ -160,17 +158,27 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format + (title, seeders), logger.DEBUG) continue torrent_size = cells[labels.index('Size')].get_text() torrent_size = torrent_size[:-2] + ' ' + torrent_size[-2:] size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, - 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 65b2224797..3160e8e65c 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -1,31 +1,31 @@ # coding=utf-8 # Author: djoole # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals -from requests.auth import AuthBase import time import traceback +from requests.auth import AuthBase + from sickbeard import logger, tvcache from sickbeard.common import USER_AGENT + from sickrage.helper.common import try_int from sickrage.helper.common import convert_size from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -98,64 +98,71 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - search_urlS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS'] - for search_url in search_urlS: + search_urls = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS'] + for search_url in search_urls: data = self.get_url(search_url, returns='json') if not data: continue - try: - if 'torrents' not in data and mode != 'RSS': - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue + if 'torrents' not in data and mode != 'RSS': + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - torrents = data['torrents'] if mode != 'RSS' else data + torrents = data['torrents'] if mode != 'RSS' else data - if not torrents: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + if not torrents: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue + + for torrent in torrents: + if mode == 'RSS' and 'category' in torrent and try_int(torrent['category'], 0) not in self.subcategories: continue - for torrent in torrents: - if mode == 'RSS' and 'category' in torrent and try_int(torrent['category'], 0) not in self.subcategories: + try: + title = torrent['name'] + torrent_id = torrent['id'] + download_url = (self.urls['download'] % torrent_id) + if not all([title, download_url]): continue - try: - title = torrent['name'] - torrent_id = torrent['id'] - download_url = (self.urls['download'] % torrent_id) - if not all([title, download_url]): - continue - - seeders = try_int(torrent['seeders']) - leechers = try_int(torrent['leechers']) - verified = bool(torrent['isVerified']) - torrent_size = torrent['size'] - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) - continue - - if self.confirmed and not verified and mode != 'RSS': - logger.log("Found result {0} but that doesn't seem like a verified result so I'm ignoring it".format(title), logger.DEBUG) - continue - - size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, - 'leechers': leechers, 'pubdate': None, 'hash': None} - if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) + seeders = try_int(torrent['seeders']) + leechers = try_int(torrent['leechers']) + verified = bool(torrent['isVerified']) - items.append(item) + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format + (title, seeders), logger.DEBUG) + continue - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + if self.confirmed and not verified and mode != 'RSS': + logger.log("Found result {0} but that doesn't seem like a verified" + " result so I'm ignoring it".format(title), logger.DEBUG) continue - except Exception: - logger.log('Failed parsing provider. Traceback: %s' % traceback.format_exc(), logger.ERROR) + torrent_size = torrent['size'] + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items @@ -171,4 +178,5 @@ def __call__(self, r): r.headers['Authorization'] = self.token return r + provider = T411Provider() diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index 0799d0ee01..c9e0498cb5 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -1,28 +1,27 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import re import traceback import validators + from requests.compat import urljoin from sickbeard import logger, tvcache @@ -140,14 +139,16 @@ def process_column_header(th): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1}".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue # Accept Torrent only from Good People for every Episode Search if self.confirmed and not result.find(alt=re.compile(r'VIP|Trusted')): if mode != 'RSS': - logger.log("Found result {0} but that doesn't seem like a trusted result so I'm ignoring it".format(title), logger.DEBUG) + logger.log("Found result {0} but that doesn't seem like a trusted" + " result so I'm ignoring it".format(title), logger.DEBUG) continue # Convert size after all possible skip scenarios @@ -155,7 +156,15 @@ def process_column_header(th): torrent_size = re.sub(r'Size ([\d.]+).+([KMGT]iB)', r'\1 \2', torrent_size) size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 776277f8ad..562eb61719 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -1,22 +1,21 @@ # coding=utf-8 # Author: Giovanni Borri # Modified by gborri, https://github.com/gborri for TNTVillage - # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals @@ -118,7 +117,7 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') + raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) return True @@ -229,10 +228,10 @@ def _is_italian(self, torrent_rows): if not name or name == 'None': return False - subFound = italian = False + sub_found = italian = False for sub in self.sub_string: if re.search(sub, name, re.I): - subFound = True + sub_found = True else: continue @@ -241,7 +240,7 @@ def _is_italian(self, torrent_rows): italian = True break - if not subFound and re.search('ita', name, re.I): + if not sub_found and re.search('ita', name, re.I): logger.log('Found Italian release: ' + name, logger.DEBUG) italian = True @@ -318,87 +317,92 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many logger.log('No data returned from provider', logger.DEBUG) continue - try: - with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find('table', attrs={'class': 'copyright'}) - torrent_rows = torrent_table('tr') if torrent_table else [] + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', attrs={'class': 'copyright'}) + torrent_rows = torrent_table('tr') if torrent_table else [] + + # Continue only if one Release is found + if len(torrent_rows) < 3: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + last_page = 1 + continue + + if len(torrent_rows) < 42: + last_page = 1 + + for result in torrent_table('tr')[2:]: + try: + link = result.find('td').find('a') + title = link.string + download_url = self.urls['download'] % result('td')[8].find('a')['href'][-8:] + if not all([title, download_url]): + continue - # Continue only if one Release is found - if len(torrent_rows) < 3: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - last_page = 1 - continue + leechers = result('td')[3]('td')[1].text + leechers = int(leechers.strip('[]')) + seeders = result('td')[3]('td')[2].text + seeders = int(seeders.strip('[]')) - if len(torrent_rows) < 42: - last_page = 1 - - for result in torrent_table('tr')[2:]: - - try: - link = result.find('td').find('a') - title = link.string - download_url = self.urls['download'] % result('td')[8].find('a')['href'][-8:] - leechers = result('td')[3]('td')[1].text - leechers = int(leechers.strip('[]')) - seeders = result('td')[3]('td')[2].text - seeders = int(seeders.strip('[]')) - torrent_size = result('td')[3]('td')[3].text.strip('[]') + ' GB' - size = convert_size(torrent_size) or -1 - - filename_qt = self._reverseQuality(self._episodeQuality(result)) - for text in self.hdtext: - title1 = title - title = title.replace(text, filename_qt) - if title != title1: - break - - if Quality.nameQuality(title) == Quality.UNKNOWN: - title += filename_qt - - if not self._is_italian(result) and not self.subtitle: - logger.log('Torrent is subtitled, skipping: %s ' % title, logger.DEBUG) - continue - - if self.engrelease and not self._is_english(result): - logger.log('Torrent isnt english audio/subtitled , skipping: %s ' % title, logger.DEBUG) - continue - - search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] - show_title = search_show - rindex = re.search(r'([Ss][\d{1,2}]+)', title) - if rindex: - show_title = title[:rindex.start()] - ep_params = 
title[rindex.start():] - if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower(): - new_title = search_show + ep_params - title = new_title - - if not all([title, download_url]): - continue - - if self._is_season_pack(title): - title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title) - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue - - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + # Filter unseeded torrent + if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format(title, seeders, leechers), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format + (title, seeders), logger.DEBUG) + continue + + filename_qt = self._reverseQuality(self._episodeQuality(result)) + for text in self.hdtext: + title1 = title + title = title.replace(text, filename_qt) + if title != title1: + break - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + if Quality.nameQuality(title) == Quality.UNKNOWN: + title += filename_qt + + if not self._is_italian(result) and not self.subtitle: + logger.log('Torrent is subtitled, skipping: %s ' % title, logger.DEBUG) + continue + + if self.engrelease and not self._is_english(result): + logger.log('Torrent isnt english audio/subtitled , skipping: %s ' % title, logger.DEBUG) continue - except Exception: - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] + show_title = search_show + rindex = re.search(r'([Ss][\d{1,2}]+)', title) + if rindex: + show_title = title[:rindex.start()] + ep_params = title[rindex.start():] + if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower(): + new_title = search_show + ep_params + title = new_title + + if self._is_season_pack(title): + title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title) + + torrent_size = result('td')[3]('td')[3].text.strip('[]') + ' GB' + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index 9fff0bf289..bed2eb9351 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: Mr_Orange # - -# -# This file is part of SickRage. +# This file is part of Medusa. 
# -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -58,6 +56,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), @@ -88,28 +87,37 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man desc_top = top.find('td', class_='desc-top') title = desc_top.get_text(strip=True) download_url = desc_top.find('a')['href'] - - desc_bottom = bot.find('td', class_='desc-bot').get_text(strip=True) - size = convert_size(desc_bottom.split('|')[1].strip('Size: ')) or -1 + if not all([title, download_url]): + continue stats = bot.find('td', class_='stats').get_text(strip=True) sl = re.match(r'S:(?P\d+)L:(?P\d+)C:(?:\d+)ID:(?:\d+)', stats.replace(' ', '')) seeders = try_int(sl.group('seeders')) if sl else 0 leechers = try_int(sl.group('leechers')) if sl else 0 - if not all([title, download_url]): - continue - # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + desc_bottom = bot.find('td', class_='desc-bot').get_text(strip=True) + size = convert_size(desc_bottom.split('|')[1].strip('Size: ')) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log('Found result: %s with %s seeders and %s leechers' % (title, seeders, leechers), logger.DEBUG) + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): @@ -121,4 +129,5 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + provider = TokyoToshokanProvider() diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index cdc2eae814..7bd09655e8 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -1,22 +1,20 @@ -# coding=utf-8 +# coding=utf-8 # Author: Idan Gutman # - -# -# This file is part of SickRage. +# This file is part of Medusa. 
# -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -87,12 +85,17 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results search_params = { - 'c41': 1, 'c33': 1, 'c38': 1, 'c32': 1, 'c37': 1 + 'c41': 1, + 'c33': 1, + 'c38': 1, + 'c32': 1, + 'c37': 1 } for mode in search_strings: items = [] - logger.log('Search Mode: {}'.format(mode), logger.DEBUG) + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': @@ -138,15 +141,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue - # Need size for failed downloads handling torrent_size = cells[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) From 270e72ccb6234302d84cd335dfbf1da9feb6bf25 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 10 Jun 2016 15:16:48 +0200 Subject: [PATCH 030/134] Last 9 providers --- sickbeard/providers/torrentday.py | 34 ++++--- sickbeard/providers/torrentleech.py | 23 +++-- sickbeard/providers/torrentz.py | 36 +++++--- sickbeard/providers/transmitthenet.py | 123 ++++++++++++++------------ sickbeard/providers/tvchaosuk.py | 26 ++++-- sickbeard/providers/womble.py | 11 ++- sickbeard/providers/xthor.py | 21 +++-- sickbeard/providers/zooqle.py | 8 +- 8 files changed, 164 insertions(+), 118 deletions(-) diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index ff04501530..773ae5c499 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -1,27 +1,26 @@ # coding=utf-8 # Author: Mr_Orange # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. 
# -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import re import traceback + from requests.compat import urljoin from requests.exceptions import RequestException from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar @@ -110,6 +109,7 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many for mode in search_params: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_params[mode]: if mode != 'RSS': @@ -146,7 +146,6 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many try: title = re.sub(r'\[.*\=.*\].*\[/.*\]', '', torrent['name']) if torrent['name'] else None download_url = urljoin(self.urls['download'], '{}/{}'.format(torrent['id'], torrent['fname'])) if torrent['id'] and torrent['fname'] else None - if not all([title, download_url]): continue @@ -156,23 +155,32 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format(title, seeders), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format + (title, seeders), logger.DEBUG) continue torrent_size = torrent['size'] size = convert_size(torrent_size) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} - + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index ba9c9c3aa9..2d51fafd4b 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -1,27 +1,26 @@ # coding=utf-8 # Author: Dustyn Gibson # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals import re import traceback + from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -154,14 +153,22 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1})".format + " minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue torrent_size = result('td')[labels.index('Size')].get_text() size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py index eca4cbcebb..2507674603 100644 --- a/sickbeard/providers/torrentz.py +++ b/sickbeard/providers/torrentz.py @@ -1,20 +1,20 @@ # coding=utf-8 # Author: Dustyn Gibson # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -92,25 +92,37 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man title_raw = item.title.text # Add "-" after codec and add missing "." - title = re.sub(r'([xh][ .]?264|xvid)( )', r'\1-', title_raw).replace(' ','.') if title_raw else '' - t_hash = item.guid.text.rsplit('/', 1)[-1] - - if not all([title, t_hash]): + title = re.sub(r'([xh][ .]?264|xvid)( )', r'\1-', title_raw).replace(' ', '.') if title_raw else '' + torrent_hash = item.guid.text.rsplit('/', 1)[-1] + if not all([title, torrent_hash]): continue - download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title + self._custom_trackers + download_url = "magnet:?xt=urn:btih:" + torrent_hash + "&dn=" + title + self._custom_trackers torrent_size, seeders, leechers = self._split_description(item.find('description').text) size = convert_size(torrent_size) or -1 # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue - result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': t_hash} - items.append(result) + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': torrent_hash + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py index 03f8182432..6730ac8104 100644 --- a/sickbeard/providers/transmitthenet.py +++ b/sickbeard/providers/transmitthenet.py @@ -1,26 +1,25 @@ # coding=utf-8 # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals import re import traceback + from requests.utils import dict_from_cookiejar from requests.compat import urljoin @@ -96,6 +95,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': @@ -117,70 +118,74 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('No data returned from provider', logger.DEBUG) continue - try: - with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find('table', {'id': 'torrent_table'}) - if not torrent_table: - logger.log('Data returned from %s does not contain any torrents' % self.name, logger.DEBUG) - continue - - torrent_rows = torrent_table('tr', {'class': 'torrent'}) - - # Continue only if one Release is found - if not torrent_rows: - logger.log('Data returned from %s does not contain any torrents' % self.name, logger.DEBUG) - continue + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', {'id': 'torrent_table'}) + if not torrent_table: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - for torrent_row in torrent_rows: - try: - freeleech = torrent_row.find('img', alt='Freeleech') is not None - if self.freeleech and not freeleech: - continue + torrent_rows = torrent_table('tr', {'class': 'torrent'}) - download_item = torrent_row.find('a', {'title': [ - 'Download Torrent', # Download link - 'Previously Grabbed Torrent File', # Already Downloaded - 'Currently Seeding Torrent', # Seeding - 'Currently Leeching Torrent', # Leeching - ]}) + # Continue only if one Release is found + if not torrent_rows: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - if not download_item: - continue + for torrent_row in torrent_rows: + try: + freeleech = torrent_row.find('img', alt='Freeleech') is not None + if self.freeleech and not freeleech: + continue - download_url = urljoin(self.url, download_item['href']) + download_item = torrent_row.find('a', {'title': [ + 'Download Torrent', # Download link + 'Previously Grabbed Torrent File', # Already Downloaded + 'Currently Seeding Torrent', # Seeding + 'Currently Leeching Torrent', # Leeching + ]}) - temp_anchor = torrent_row.find('a', {'data-src': True}) - title = temp_anchor['data-src'].rsplit('.', 1)[0] - if not all([title, download_url]): - continue + if not download_item: + continue - cells = torrent_row('td') - seeders = try_int(cells[8].text.strip()) - leechers = try_int(cells[9].text.strip()) + download_url = urljoin(self.url, download_item['href']) - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. 
Seeders: {1})".format - (title, seeders), logger.DEBUG) - continue + temp_anchor = torrent_row.find('a', {'data-src': True}) + title = temp_anchor['data-src'].rsplit('.', 1)[0] + if not all([title, download_url]): + continue - size = temp_anchor['data-filesize'] or -1 + cells = torrent_row('td') + seeders = try_int(cells[8].text.strip()) + leechers = try_int(cells[9].text.strip()) - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + # Filter unseeded torrent + if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) - - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + logger.log("Discarding torrent because it doesn't meet the" + " minimum seeders: {0}. Seeders: {1}".format + (title, seeders), logger.DEBUG) continue - except Exception: - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + + size = temp_anchor['data-filesize'] or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index b36100dda7..c091334afb 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -1,26 +1,28 @@ # coding=utf-8 # -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
from __future__ import unicode_literals + import re import traceback from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser + from sickrage.helper.common import convert_size, try_int from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -52,7 +54,7 @@ def _check_auth(self): if self.username and self.password: return True - raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') + raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) def login(self): if len(self.session.cookies) >= 4: @@ -138,8 +140,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Discarding torrent because it doesn\'t meet the' - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue @@ -160,11 +162,19 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_size = torrent('td')[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size, units=units) or -1 + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) - item = {'title': title + '.hdtv.x264', 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. Traceback: {0!r}'.format diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py index ec288d456d..af5d075f9b 100644 --- a/sickbeard/providers/womble.py +++ b/sickbeard/providers/womble.py @@ -1,22 +1,20 @@ # coding=utf-8 # Author: Nic Wolfe # - -# -# This file is part of SickRage. +# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -72,4 +70,5 @@ def updateCache(self): def _checkAuth(self, data): return data if data['feed'] and data['feed']['title'] != 'Invalid Link' else None + provider = WombleProvider() diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py index 3bc1849961..0ecde09ebc 100644 --- a/sickbeard/providers/xthor.py +++ b/sickbeard/providers/xthor.py @@ -1,22 +1,21 @@ # coding=utf-8 # Author: adaur # Rewrite: Dustyn Gibson (miigotu) - # -# This file is part of SickRage. 
+# This file is part of Medusa. # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . from __future__ import unicode_literals @@ -171,14 +170,22 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1})".format + ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue torrent_size = cells[labels.index('Taille')].get_text() size = convert_size(torrent_size, units=units) or -1 - item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'pubdate': None, 'hash': None} + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py index 868ebf7f42..fd69dc1240 100644 --- a/sickbeard/providers/zooqle.py +++ b/sickbeard/providers/zooqle.py @@ -19,6 +19,7 @@ from __future__ import unicode_literals import traceback + from requests.compat import urljoin from sickbeard import logger, tvcache @@ -119,13 +120,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Discarding torrent because it doesn\'t meet the' - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue torrent_size = cells[4].get_text(strip=True) - size = convert_size(torrent_size, units=units) or -1 item = { @@ -147,8 +147,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (traceback.format_exc()), logger.ERROR) continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results From 61d1076a499d0537b4bdca023c1f30ff3717b38b Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 10 Jun 2016 15:19:52 +0200 Subject: [PATCH 031/134] Remove sceneelite from init --- sickbeard/providers/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index f4eec2c5b9..62c1dfeff6 100644 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -25,7 +25,7 @@ omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, xthor, abnormal, torrentbytes, cpasbien,\ freshontv, morethantv, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \ scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free, limetorrents, \ - norbits, ilovetorrents, sceneelite, anizb, bithdtv, zooqle + norbits, ilovetorrents, anizb, bithdtv, zooqle __all__ = [ 'womble', 'btn', 'thepiratebay', 'kat', 'torrentleech', 'scc', 'hdtorrents', @@ -36,7 +36,7 @@ 'xthor', 'abnormal', 'scenetime', 'btdigg', 'transmitthenet', 'tvchaosuk', 'torrentproject', 'extratorrent', 'bitcannon', 'torrentz', 'pretome', 'gftracker', 'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free', 'limetorrents', - 'norbits', 'ilovetorrents', 'sceneelite', 'anizb', 'bithdtv', 'zooqle' + 'norbits', 'ilovetorrents', 'anizb', 'bithdtv', 'zooqle' ] From eb461f0097b6ec8d2c9c00a36729491a72034641 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 10 Jun 2016 15:41:17 +0200 Subject: [PATCH 032/134] Renamed all search_params to search_strings --- sickbeard/providers/btn.py | 25 +++++++++++++------------ sickbeard/providers/freshontv.py | 6 +++--- sickbeard/providers/hdbits.py | 6 +++--- sickbeard/providers/iptorrents.py | 6 +++--- sickbeard/providers/norbits.py | 6 +++--- sickbeard/providers/pretome.py | 6 +++--- sickbeard/providers/scenetime.py | 6 +++--- sickbeard/providers/t411.py | 6 +++--- sickbeard/providers/tntvillage.py | 6 +++--- sickbeard/providers/torrentday.py | 6 +++--- 10 files changed, 40 insertions(+), 39 deletions(-) diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index f9e0c41ef4..dd46b64007 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -18,13 +18,14 @@ from __future__ import unicode_literals -from datetime import datetime import jsonrpclib import math import socket import time - import sickbeard + +from datetime import datetime + from sickbeard import classes, logger, scene_exceptions, tvcache from sickbeard.common import cpu_presets from sickbeard.helpers import sanitizeSceneName @@ -63,13 +64,13 @@ def _checkAuthFromData(self, parsed_json): return self._check_auth() if 'api-error' in parsed_json: - logger.log('Incorrect authentication credentials: % s' % parsed_json['api-error'], logger.DEBUG) - raise AuthException( - 'Your authentication credentials for ' + self.name + ' are incorrect, check your config.') + 
logger.log('Incorrect authentication credentials: %s' % parsed_json['api-error'], logger.DEBUG) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True - def search(self, search_params, age=0, ep_obj=None): # pylint:disable=too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many-locals self._check_auth() @@ -81,9 +82,9 @@ def search(self, search_params, age=0, ep_obj=None): # pylint:disable=too-many- if age: params['age'] = '<=' + str(int(age)) - if search_params: - params.update(search_params) - logger.log('Search string: %s' % search_params, logger.DEBUG) + if search_strings: + params.update(search_strings) + logger.log('Search string: %s' % search_strings, logger.DEBUG) parsed_json = self._api_call(apikey, params) if not parsed_json: @@ -288,9 +289,9 @@ def _getRSSData(self): seconds_since_last_update = math.ceil(time.time() - time.mktime(self._getLastUpdate().timetuple())) # default to 15 minutes - seconds_minTime = self.minTime * 60 - if seconds_since_last_update < seconds_minTime: - seconds_since_last_update = seconds_minTime + seconds_min_time = self.minTime * 60 + if seconds_since_last_update < seconds_min_time: + seconds_since_last_update = seconds_min_time # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of 'RSS' data search, older things will need to be done through backlog if seconds_since_last_update > 86400: diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 3ccc1f490c..ad2e636fe5 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -103,17 +103,17 @@ def login(self): return False - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): return results freeleech = '3' if self.freeleech else '0' - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index fa22dfcdb9..04562fc481 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -75,16 +75,16 @@ def _get_title_and_url(self, item): return title, url - def search(self, search_params, age=0, ep_obj=None): + def search(self, search_strings, age=0, ep_obj=None): # FIXME results = [] - logger.log('Search string: {0}'.format(search_params), logger.DEBUG) + logger.log('Search string: {0}'.format(search_strings), logger.DEBUG) self._check_auth() - parsed_json = self.get_url(self.urls['search'], post_data=search_params, returns='json') + parsed_json = self.get_url(self.urls['search'], post_data=search_strings, returns='json') if not parsed_json: return [] diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index cb487bf83d..e95b781cb2 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -86,17 +86,17 @@ def login(self): return True - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): 
# pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): return results freeleech = '&free=on' if self.freeleech else '' - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py index 7d12bcaaa3..8bcedcc710 100644 --- a/sickbeard/providers/norbits.py +++ b/sickbeard/providers/norbits.py @@ -65,16 +65,16 @@ def _checkAuthFromData(self, parsed_json): # pylint: disable=invalid-name return True - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals """ Do the actual searching and JSON parsing""" results = [] - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format (search_string), logger.DEBUG) diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index b4ff4da0c2..4b289f87cb 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -83,15 +83,15 @@ def login(self): return True - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-statements, too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-statements, too-many-locals results = [] if not self.login(): return results - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 3dde79cb37..7ab271c546 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -72,15 +72,15 @@ def login(self): return True - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals results = [] if not self.login(): return results - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 3160e8e65c..c81601ee5d 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -84,15 +84,15 @@ def login(self): logger.log('Token not found in authentication response', logger.WARNING) return False - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements results = [] if not self.login(): return results - for mode in 
search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 562eb61719..ff27ae2159 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -275,17 +275,17 @@ def _is_season_pack(name): if int(episodes[0]['count']) == len(parse_result.episode_numbers): return True - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): return results self.categories = 'cat=' + str(self.cat) - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode == 'RSS': self.page = 2 diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index 773ae5c499..d72742fa56 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -101,16 +101,16 @@ def login(self): logger.log('Unable to obtain cookie', logger.WARNING) return False - def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] if not self.login(): return results - for mode in search_params: + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_params[mode]: + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), From 29048b369443256db02ff7aba685ca0943a887a7 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 14:16:53 +0200 Subject: [PATCH 033/134] Fix for GFTracker --- sickbeard/providers/gftracker.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index b9458c8f2e..a4b9594c56 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -63,7 +63,8 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True @@ -152,7 +153,9 @@ def process_column_header(td): try: cells = result('td') - title = cells[labels.index('Name')].find('a').find_next('a')['title'] or cells[labels.index('Name')].find('a')['title'] + title_anchor = cells[labels.index('Name')].find('a').find_next('a') or \ + cells[labels.index('Name')].find('a') + title = title_anchor.get('title') if title_anchor else None download_url = self.url + cells[labels.index('DL')].find('a')['href'] if not all([title, download_url]): continue From ea7b973d7a01b5f787514b1d8f5fe512cf7d1942 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 14:57:55 +0200 Subject: [PATCH 034/134] Fix TNTVillage --- sickbeard/providers/tntvillage.py | 22 ++++++++++++++-------- 1 
file changed, 14 insertions(+), 8 deletions(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index ff27ae2159..f258432f00 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -117,7 +117,8 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True @@ -137,7 +138,8 @@ def login(self): logger.log('Unable to connect to provider', logger.WARNING) return False - if re.search('Sono stati riscontrati i seguenti errori', response) or re.search('Connettiti', response): + if re.search('Sono stati riscontrati i seguenti errori', response) or \ + re.search('Connettiti', response): logger.log('Invalid username or password. Check your settings', logger.WARNING) return False @@ -181,7 +183,8 @@ def _episodeQuality(torrent_rows): # pylint: disable=too-many-return-statements if img_all: for img_type in img_all: try: - file_quality = file_quality + ' ' + img_type['src'].replace('style_images/mkportal-636/', '').replace('.gif', '').replace('.png', '') + file_quality = file_quality + ' ' + img_type['src'].replace('style_images/mkportal-636/', '') + file_quality = file_quality.replace('.gif', '').replace('.png', '') except Exception: logger.log('Failed parsing quality. Traceback: %s' % traceback.format_exc(), logger.ERROR) @@ -201,7 +204,8 @@ def checkName(options, func): if img_all: file_quality = (torrent_rows('td'))[1].get_text() - webdl = checkName(['webdl', 'webmux', 'webrip', 'dl-webmux', 'web-dlmux', 'webdl-mux', 'web-dl', 'webdlmux', 'dlmux'], any) + webdl = checkName(['webdl', 'webmux', 'webrip', 'dl-webmux', 'web-dlmux', + 'webdl-mux', 'web-dl', 'webdlmux', 'dlmux'], any) if sdOptions and not dvdOptions and not fullHD and not hdOptions: return Quality.SDTV @@ -272,7 +276,7 @@ def _is_season_pack(name): main_db_con = db.DBConnection() sql_selection = 'select count(*) as count from tv_episodes where showid = ? and season = ?' 
episodes = main_db_con.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number]) - if int(episodes[0]['count']) == len(parse_result.episode_numbers): + if int(episodes[0][b'count']) == len(parse_result.episode_numbers): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements @@ -333,8 +337,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for result in torrent_table('tr')[2:]: try: link = result.find('td').find('a') - title = link.string - download_url = self.urls['download'] % result('td')[8].find('a')['href'][-8:] + title = link.string if link else None + dl_link = result('td') + dl_url = dl_link[8].find('a')['href'][-8:] if len(dl_link) > 7 else None + download_url = self.urls['download'] % dl_url if dl_url else None if not all([title, download_url]): continue @@ -366,7 +372,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man continue if self.engrelease and not self._is_english(result): - logger.log('Torrent isnt english audio/subtitled , skipping: %s ' % title, logger.DEBUG) + logger.log('Torrent isnt english audio/subtitled, skipping: %s ' % title, logger.DEBUG) continue search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] From dce71b32617c354a2514bb9da71b95a234de0702 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 15:00:45 +0200 Subject: [PATCH 035/134] Fix HDTorrents --- sickbeard/providers/hdtorrents.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index 1734361005..b835ea903e 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -146,17 +146,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if len(cells) < len(labels): continue - title = cells[labels.index('Filename')].a.get_text(strip=True) - seeders = try_int(cells[labels.index('S')].get_text(strip=True)) - leechers = try_int(cells[labels.index('L')].get_text(strip=True)) - torrent_size = cells[labels.index('Size')].get_text() - - size = convert_size(torrent_size) or -1 + title = cells[labels.index('Filename')].a + title = title.get_text(strip=True) if title else None download_url = self.url + '/' + cells[labels.index('Dl')].a['href'] - if not all([title, download_url]): continue + seeders = try_int(cells[labels.index('S')].get_text(strip=True)) + leechers = try_int(cells[labels.index('L')].get_text(strip=True)) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': @@ -165,6 +163,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue + torrent_size = cells[labels.index('Size')].get_text() + size = convert_size(torrent_size) or -1 + item = { 'title': title, 'link': download_url, From 00f671279697158badb9d2830888b8b282cbd757 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 15:23:01 +0200 Subject: [PATCH 036/134] Fix Extratorrent --- sickbeard/providers/extratorrent.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index ef71dd763c..2c622938b8 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -78,10 +78,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for item in 
html('item'): try: title = re.sub(r'^$', '', item.find('title').get_text(strip=True)) - seeders = try_int(item.find('seeders').get_text(strip=True)) - leechers = try_int(item.find('leechers').get_text(strip=True)) - torrent_size = item.find('size').get_text() - size = convert_size(torrent_size) or -1 if sickbeard.TORRENT_METHOD == 'blackhole': enclosure = item.find('enclosure') # Backlog doesnt have enclosure @@ -94,7 +90,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not all([title, download_url]): continue - # Filter unseeded torrent + seeders = item.find('seeders') + seeders = try_int(seeders.get_text(strip=True)) if seeders else 1 + leechers = item.find('leechers') + leechers = try_int(leechers.get_text(strip=True)) if leechers else 0 + + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" @@ -102,6 +103,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue + torrent_size = item.find('size') + torrent_size = torrent_size.get_text() if torrent_size else None + size = convert_size(torrent_size) or -1 + item = { 'title': title, 'link': download_url, From 4ce7ceb1d9aa0420e2229cfd5236fb350fcd427b Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 15:26:29 +0200 Subject: [PATCH 037/134] Fix HDSpace --- sickbeard/providers/hdspace.py | 93 +++++++++++++++++----------------- 1 file changed, 47 insertions(+), 46 deletions(-) diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index 1b21e229f8..805d409bd9 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -122,59 +122,60 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Could not find main torrent table', logger.ERROR) continue - html = BS4Parser(data[index:], 'html5lib') - if not html: - logger.log('No html data parsed from provider', logger.DEBUG) - continue - - torrents = html('tr') - if not torrents: - continue - - # Skip column headers - for result in torrents[1:]: - if len(result.contents) < 10: - # skip extraneous rows at the end + with BS4Parser(data[index:], 'html5lib') as html: + if not html: + logger.log('No html data parsed from provider', logger.DEBUG) continue - try: - dl_href = result.find('a', attrs={'href': re.compile(r'download.php.*')})['href'] - title = re.search('f=(.*).torrent', dl_href).group(1).replace('+', '.') - download_url = self.urls['base_url'] + dl_href - seeders = int(result.find('span', attrs={'class': 'seedy'}).find('a').text) - leechers = int(result.find('span', attrs={'class': 'leechy'}).find('a').text) - torrent_size = re.match(r'.*?([0-9]+,?\.?[0-9]* [KkMmGg]+[Bb]+).*', str(result), re.DOTALL).group(1) - size = convert_size(torrent_size) or -1 + torrents = html('tr') + if not torrents: + continue - if not all([title, download_url]): + # Skip column headers + for result in torrents[1:]: + if len(result.contents) < 10: + # skip extraneous rows at the end continue - # Filter unseeded torrent - if seeders < min(self.minseed, 1): + try: + dl_href = result.find('a', attrs={'href': re.compile(r'download.php.*')})['href'] + title = re.search('f=(.*).torrent', dl_href).group(1).replace('+', '.') + download_url = self.urls['base_url'] + dl_href + if not all([title, download_url]): + continue + + seeders = int(result.find('span', attrs={'class': 'seedy'}).find('a').text) + leechers = int(result.find('span', 
attrs={'class': 'leechy'}).find('a').text) + + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. Seeders: {1})'.format + (title, seeders), logger.DEBUG) + continue + + torrent_size = re.match(r'.*?([0-9]+,?\.?[0-9]* [KkMmGg]+[Bb]+).*', str(result), re.DOTALL).group(1) + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format - (title, seeders), logger.DEBUG) - continue + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) - item = { - 'title': title, - 'link': download_url, - 'size': size, - 'seeders': seeders, - 'leechers': leechers, - 'pubdate': None, - 'hash': None - } - if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) - - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items From 030afbdc73be217240a7c53b6fef88bd2eac95c8 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 15:43:06 +0200 Subject: [PATCH 038/134] Use string in SQL with unicode_literals in GenericProvider --- sickrage/providers/GenericProvider.py | 28 +++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 1c48a944a5..71b57b8938 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -1,21 +1,21 @@ # coding=utf-8 -# This file is part of SickRage. +# This file is part of Medusa. # - # Git: https://github.com/PyMedusa/SickRage.git # -# SickRage is free software: you can redistribute it and/or modify +# Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # -# SickRage is distributed in the hope that it will be useful, +# Medusa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with SickRage. If not, see . +# along with Medusa. If not, see . 
+ from __future__ import unicode_literals import re @@ -216,7 +216,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, parse_result.season_number is not None, parse_result.episode_numbers, [ep for ep in episodes if (ep.season, ep.scene_season)[ep.show.is_scene] == - parse_result.season_number and (ep.episode, ep.scene_episode)[ep.show.is_scene] in parse_result.episode_numbers] + parse_result.season_number and (ep.episode, ep.scene_episode)[ep.show.is_scene] in parse_result.episode_numbers] ]): logger.log( @@ -244,13 +244,13 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, ) if len(sql_results) == 2: - if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0: - actual_season = int(sql_results[1]['season']) - actual_episodes = [int(sql_results[1]['episode'])] + if int(sql_results[0][b'season']) == 0 and int(sql_results[1][b'season']) != 0: + actual_season = int(sql_results[1][b'season']) + actual_episodes = [int(sql_results[1][b'episode'])] same_day_special = True - elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0: - actual_season = int(sql_results[0]['season']) - actual_episodes = [int(sql_results[0]['episode'])] + elif int(sql_results[1][b'season']) == 0 and int(sql_results[0][b'season']) != 0: + actual_season = int(sql_results[0][b'season']) + actual_episodes = [int(sql_results[0][b'episode'])] same_day_special = True elif len(sql_results) != 1: logger.log( @@ -259,8 +259,8 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, add_cache_entry = True if not add_cache_entry and not same_day_special: - actual_season = int(sql_results[0]['season']) - actual_episodes = [int(sql_results[0]['episode'])] + actual_season = int(sql_results[0][b'season']) + actual_episodes = [int(sql_results[0][b'episode'])] else: actual_season = parse_result.season_number actual_episodes = parse_result.episode_numbers From 293444cf495c917760fefa0b2a2c85f2b1a80480 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 15:57:08 +0200 Subject: [PATCH 039/134] Fix BITHDTV --- sickbeard/providers/bithdtv.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py index 7b90e13c4b..ac3245528e 100644 --- a/sickbeard/providers/bithdtv.py +++ b/sickbeard/providers/bithdtv.py @@ -18,6 +18,8 @@ from __future__ import unicode_literals +import traceback + from requests.compat import urljoin from requests.utils import dict_from_cookiejar @@ -90,7 +92,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params['cat'] = 12 response = self.get_url(self.urls['search'], params=search_params, returns='response') - if not response.text: + if not response or not response.text: logger.log('No data returned from provider', logger.DEBUG) continue @@ -124,13 +126,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Discarding torrent because it doesn\'t meet the' - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue torrent_size = '{size} {unit}'.format(size=cells[6].contents[0], unit=cells[6].contents[1].get_text()) - size = convert_size(torrent_size, units=units) or -1 item = { @@ -146,18 +147,18 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) - items.append(item) - except StandardError: + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue - # For each search mode sort all the items by seeders if available - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results def login(self): - """Login method used for logging in before doing search and torrent downloads""" + """Login method used for logging in before doing search and torrent downloads.""" if any(dict_from_cookiejar(self.session.cookies).values()): return True @@ -168,12 +169,12 @@ def login(self): response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: - logger.log(u'Unable to connect to provider', logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) self.session.cookies.clear() return False if '
Login failed!
' in response: - logger.log(u'Invalid username or password. Check your settings', logger.WARNING) + logger.log('Invalid username or password. Check your settings', logger.WARNING) self.session.cookies.clear() return False From 439defeff07ded4114e9b11a90f822184e4f1338 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 16:26:00 +0200 Subject: [PATCH 040/134] Fix TVChaosUK --- sickbeard/providers/tvchaosuk.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index c091334afb..4890b7cdff 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -54,7 +54,8 @@ def _check_auth(self): if self.username and self.password: return True - raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) def login(self): if len(self.session.cookies) >= 4: @@ -116,11 +117,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man with BS4Parser(data, 'html5lib') as html: torrent_table = html.find(id='sortabletable') - torrent_rows = torrent_table("tr") if torrent_table else [] + torrent_rows = torrent_table('tr') if torrent_table else [] # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue labels = [label.img['title'] if label.img else label.get_text(strip=True) for label in torrent_rows[0]('td')] @@ -129,8 +130,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if self.freeleech and not torrent.find('img', alt=re.compile('Free Torrent')): continue - title = torrent.find(class_='tooltip-content').div.get_text(strip=True) - download_url = torrent.find(title='Click to Download this Torrent!').parent['href'] + title = torrent.find(class_='tooltip-content') + title = title.div.get_text(strip=True) if title else None + download_url = torrent.find(title='Click to Download this Torrent!') + download_url = download_url.parent['href'] if download_url else None if not all([title, download_url]): continue From 715b7a83792018c8a4dfa6c0c7bd765a2df46c4f Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 17:28:54 +0200 Subject: [PATCH 041/134] Improve BitSnoop --- sickbeard/providers/bitsnoop.py | 80 ++++++++++++++++----------------- 1 file changed, 38 insertions(+), 42 deletions(-) diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py index c6af13b84f..bbddffe8a1 100644 --- a/sickbeard/providers/bitsnoop.py +++ b/sickbeard/providers/bitsnoop.py @@ -21,9 +21,8 @@ import traceback import sickbeard -from bs4 import BeautifulSoup - from sickbeard import logger, tvcache +from sickbeard.bs4_parser import BS4Parser from sickrage.helper.common import convert_size, try_int from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -57,31 +56,31 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - try: - search_url = (self.urls['rss'], self.urls['search'] 
+ search_string + '/s/d/1/?fmt=rss')[mode != 'RSS'] + search_url = (self.urls['rss'], self.urls['search'] + search_string + '/s/d/1/?fmt=rss')[mode != 'RSS'] - data = self.get_url(search_url, returns='text') - if not data: - logger.log('No data returned from provider', logger.DEBUG) - continue + data = self.get_url(search_url, returns='text') + if not data: + logger.log('No data returned from provider', logger.DEBUG) + continue - if not data.startswith(' Date: Sat, 11 Jun 2016 16:27:36 +0200 Subject: [PATCH 042/134] Added flag to newznab, for torznab providers. If it's torznab then results are sorted by seeders in search.py. --- sickbeard/providers/newznab.py | 8 ++++---- sickbeard/search.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index ef0d8ca50c..8365639725 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -284,7 +284,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results for mode in search_strings: - torznab = False + self.torznab = False search_params = { 't': 'tvsearch' if 'tvdbid' in str(self.cap_tv_search) else 'search', 'limit': 100, @@ -331,9 +331,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man break try: - torznab = 'xmlns:torznab' in html.rss.attrs + self.torznab = 'xmlns:torznab' in html.rss.attrs except AttributeError: - torznab = False + self.torznab = False for item in html('item'): try: @@ -364,7 +364,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man peers = try_int(attr['value']) if attr['name'] == 'peers' else None leechers = peers - seeders if peers else leechers - if not item_size or (torznab and (seeders is -1 or leechers is -1)): + if not item_size or (self.torznab and (seeders is -1 or leechers is -1)): continue size = convert_size(item_size) or -1 diff --git a/sickbeard/search.py b/sickbeard/search.py index 1457d8f805..dc6d891985 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -585,7 +585,7 @@ def searchProviders(show, episodes, forced_search=False, downCurQuality=False, m foundResults[cur_provider.name][curEp] = searchResults[curEp] # Sort the list by seeders if possible - if cur_provider.provider_type == 'torrent': + if cur_provider.provider_type == 'torrent' or getattr(cur_provider, 'torznab'): foundResults[cur_provider.name][curEp].sort(key=lambda d: int(d.seeders), reverse=True) break From 97af77d60290e4d1e16d2dc0f01275df65dd0601 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 18:13:52 +0200 Subject: [PATCH 043/134] Improve Anizb --- sickbeard/providers/anizb.py | 62 ++++++++++++++++++++---------------- 1 file changed, 34 insertions(+), 28 deletions(-) diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py index 2ee9d7403f..2eb4a3f133 100644 --- a/sickbeard/providers/anizb.py +++ b/sickbeard/providers/anizb.py @@ -20,18 +20,18 @@ import traceback +from requests.compat import urljoin + from sickbeard import logger, tvcache +from sickbeard.bs4_parser import BS4Parser from sickrage.providers.nzb.NZBProvider import NZBProvider from sickrage.helper.common import try_int -from requests.compat import urljoin - -from bs4 import BeautifulSoup - class Anizb(NZBProvider): # pylint: disable=too-many-instance-attributes - """Nzb Provider using the open api of anizb.org for daily (rss) and backlog/forced searches""" + """Nzb Provider using the open api of anizb.org for daily (rss) and 
backlog/forced searches.""" + def __init__(self): # Provider Init @@ -63,13 +63,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results for mode in search_strings: + items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - try: search_url = (self.urls['rss'], self.urls['api'] + search_string)[mode != 'RSS'] data = self.get_url(search_url, returns='text') @@ -81,32 +82,37 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Expected xml but got something else, is your mirror failing?', logger.INFO) continue - data = BeautifulSoup(data, 'html5lib') - entries = data('item') - if not entries: - logger.log('Returned xml contained no results', logger.INFO) - continue - - for item in entries: - try: - title = item.title.get_text(strip=True) - download_url = item.enclosure.get('url').strip() + with BS4Parser(data, 'html5lib') as html: + entries = html('item') + if not entries: + logger.log('Returned xml contained no results', logger.INFO) + continue - if not (title and download_url): + for item in entries: + try: + title = item.title.get_text(strip=True) + download_url = item.enclosure.get('url').strip() + if not (title and download_url): + continue + + # description = item.find('description') + size = try_int(item.enclosure.get('length', -1)) + + item = { + 'title': title, + 'link': download_url, + 'size': size + } + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue - # description = item.find('description') - size = try_int(item.enclosure.get('length', -1)) - except (AttributeError, TypeError, KeyError, ValueError): - continue - - result = {'title': title, 'link': download_url, 'size': size} - results.append(result) + results += items - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. 
Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + return results - return results provider = Anizb() From 4229c79d33c3eb20a8630757592738d52b4631da Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 18:23:51 +0200 Subject: [PATCH 044/134] Improve Bluetigers --- sickbeard/providers/bluetigers.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py index 2e6ce79c4a..d7adffb050 100644 --- a/sickbeard/providers/bluetigers.py +++ b/sickbeard/providers/bluetigers.py @@ -100,16 +100,16 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not data: continue - try: - with BS4Parser(data, 'html5lib') as html: - result_linkz = html('a', href=re.compile('torrents-details')) + with BS4Parser(data, 'html5lib') as html: + result_linkz = html('a', href=re.compile('torrents-details')) - if not result_linkz: - logger.log('Data returned from provider do not contains any torrent', logger.DEBUG) - continue + if not result_linkz: + logger.log('Data returned from provider do not contains any torrent', logger.DEBUG) + continue - if result_linkz: - for link in result_linkz: + if result_linkz: + for link in result_linkz: + try: title = link.text download_url = self.urls['base_url'] + link['href'] download_url = download_url.replace('torrents-details', 'download') @@ -143,12 +143,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue - results += items + results += items return results From fd63285ddda58a89f2e83ddd4bdd45e630d75272 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 18:26:28 +0200 Subject: [PATCH 045/134] Cleanup BTdigg --- sickbeard/providers/btdigg.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py index 4b6a0abe92..bd737da185 100644 --- a/sickbeard/providers/btdigg.py +++ b/sickbeard/providers/btdigg.py @@ -57,11 +57,14 @@ def __init__(self): def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] search_params = {'p': 0} + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: search_params['q'] = search_string + if mode != 'RSS': search_params['order'] = 0 logger.log('Search string: {0}'.format(search_string), logger.DEBUG) @@ -74,6 +77,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_url = self.custom_url + 'api/private-341ada3245790954/s02' else: search_url = self.urls['api'] + jdata = self.get_url(search_url, params=search_params, returns='json') if not jdata: logger.log('Provider did not return data', logger.DEBUG) From 238cef657defce83cf976f4d93c4d0cc316be46a Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 18:38:54 +0200 Subject: [PATCH 046/134] Improve Hounddawgs --- sickbeard/providers/cpasbien.py | 1 + sickbeard/providers/extratorrent.py | 1 + sickbeard/providers/hdtorrents.py | 1 + sickbeard/providers/hounddawgs.py | 90 ++++++++++++++--------------- 4 files changed, 47 insertions(+), 46 deletions(-) diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py index 039d2d141c..3550c42c1f 100644 --- a/sickbeard/providers/cpasbien.py +++ b/sickbeard/providers/cpasbien.py @@ -48,6 +48,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index 2c622938b8..9675e1e6a2 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -58,6 +58,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index b835ea903e..e784d5ba22 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -90,6 +90,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py index 4f5eb9d704..b4f91c7e62 100644 --- a/sickbeard/providers/hounddawgs.py +++ b/sickbeard/providers/hounddawgs.py @@ -102,6 +102,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] 
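# Illustrative sketch (an editor-style aside, not part of the hounddawgs diff around it):
# the recurring refactor in these provider patches moves the try/except from around the
# whole result table into the per-row loop, so one malformed row is skipped instead of
# aborting the entire page. Everything below is a self-contained toy example; the names
# rows/parse_row are placeholders, not Medusa code.
rows = ['5 seeders', 'garbled row', '2 seeders']

def parse_row(row):
    # Raises ValueError on rows that do not start with a number.
    return int(row.split()[0])

items = []
for row in rows:
    try:
        item = parse_row(row)
    except (AttributeError, TypeError, KeyError, ValueError, IndexError):
        continue  # skip just this row, keep processing the rest
    items.append(item)

print(items)  # [5, 2]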
logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': @@ -121,48 +122,41 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not trimmed_data: continue - try: - with BS4Parser(trimmed_data, 'html5lib') as html: - result_table = html.find('table', {'id': 'torrent_table'}) - - if not result_table: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue + with BS4Parser(trimmed_data, 'html5lib') as html: + result_table = html.find('table', {'id': 'torrent_table'}) - result_tbody = result_table.find('tbody') - entries = result_tbody.contents - del entries[1::2] + if not result_table: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - for result in entries[1:]: + result_tbody = result_table.find('tbody') + entries = result_tbody.contents + del entries[1::2] + for result in entries[1:]: + try: torrent = result('td') if len(torrent) <= 1: break all_as = (torrent[1])('a') + notinternal = result.find('img', src='/static//common/user_upload.png') + if self.ranked and notinternal: + logger.log('Found a user uploaded release, Ignoring it..', logger.DEBUG) + continue - try: - notinternal = result.find('img', src='/static//common/user_upload.png') - if self.ranked and notinternal: - logger.log('Found a user uploaded release, Ignoring it..', logger.DEBUG) - continue - freeleech = result.find('img', src='/static//common/browse/freeleech.png') - if self.freeleech and not freeleech: - continue - title = all_as[2].string - download_url = self.urls['base_url'] + all_as[0].attrs['href'] - torrent_size = result.find('td', class_='nobr').find_next_sibling('td').string - if torrent_size: - size = convert_size(torrent_size) or -1 - seeders = try_int((result('td')[6]).text.replace(',', '')) - leechers = try_int((result('td')[7]).text.replace(',', '')) - - except (AttributeError, TypeError): + freeleech = result.find('img', src='/static//common/browse/freeleech.png') + if self.freeleech and not freeleech: continue + title = all_as[2].string + download_url = self.urls['base_url'] + all_as[0].attrs['href'] if not all([title, download_url]): continue + seeders = try_int((result('td')[6]).text.replace(',', '')) + leechers = try_int((result('td')[7]).text.replace(',', '')) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': @@ -171,24 +165,28 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue - item = { - 'title': title, - 'link': download_url, - 'size': size, - 'seeders': seeders, - 'leechers': leechers, - 'pubdate': None, - 'hash': None - } - if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) - - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. 
Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + torrent_size = result.find('td', class_='nobr').find_next_sibling('td').string + if torrent_size: + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue results += items From d10f39c2b8a112200b097598e5ebda722a719427 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 18:48:43 +0200 Subject: [PATCH 047/134] Improve FreshOn --- sickbeard/providers/freshontv.py | 71 ++++++++++++++----------------- sickbeard/providers/iptorrents.py | 7 ++- 2 files changed, 36 insertions(+), 42 deletions(-) diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index ad2e636fe5..62bf13f452 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -113,6 +113,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': @@ -129,9 +130,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('No data returned from provider', logger.DEBUG) continue - try: - with BS4Parser(init_html, 'html5lib') as init_soup: - + with BS4Parser(init_html, 'html5lib') as init_soup: + try: # Check to see if there is more than 1 page of results pager = init_soup.find('div', {'class': 'pager'}) if pager: @@ -153,9 +153,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # limit RSS search if max_page_number > 3 and mode == 'RSS': max_page_number = 3 - except Exception: - logger.log('Failed parsing provider. Traceback: %s' % traceback.format_exc(), logger.ERROR) - continue + except Exception: + logger.log('Failed parsing provider. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) + continue data_response_list = [init_html] @@ -173,45 +173,33 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data_response_list.append(page_html) - try: - - for data_response in data_response_list: - - with BS4Parser(data_response, 'html5lib') as html: + for data_response in data_response_list: - torrent_rows = html('tr', {'class': re.compile('torrent_[0-9]*')}) + with BS4Parser(data_response, 'html5lib') as html: + torrent_rows = html('tr', {'class': re.compile('torrent_[0-9]*')}) - # Continue only if a Release is found - if not torrent_rows: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue + # Continue only if a Release is found + if not torrent_rows: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue - for individual_torrent in torrent_rows: + for individual_torrent in torrent_rows: + try: # skip if torrent has been nuked due to poor quality if individual_torrent.find('img', alt='Nuked') is not None: continue - try: - title = individual_torrent.find('a', {'class': 'torrent_name_link'})['title'] - except Exception: - logger.log('Unable to parse torrent title. Traceback: %s ' % traceback.format_exc(), logger.WARNING) - continue - - try: - details_url = individual_torrent.find('a', {'class': 'torrent_name_link'})['href'] - torrent_id = int((re.match('.*?([0-9]+)$', details_url).group(1)).strip()) - download_url = self.urls['download'] % (str(torrent_id)) - seeders = try_int(individual_torrent.find('td', {'class': 'table_seeders'}).find('span').text.strip(), 1) - leechers = try_int(individual_torrent.find('td', {'class': 'table_leechers'}).find('a').text.strip(), 0) - torrent_size = individual_torrent.find('td', {'class': 'table_size'}).get_text() - size = convert_size(torrent_size) or -1 - except Exception: - continue - + title = individual_torrent.find('a', {'class': 'torrent_name_link'})['title'] + details_url = individual_torrent.find('a', {'class': 'torrent_name_link'})['href'] + torrent_id = int((re.match('.*?([0-9]+)$', details_url).group(1)).strip()) + download_url = self.urls['download'] % (str(torrent_id)) if not all([title, download_url]): continue + seeders = try_int(individual_torrent.find('td', {'class': 'table_seeders'}).find('span').text.strip(), 1) + leechers = try_int(individual_torrent.find('td', {'class': 'table_leechers'}).find('a').text.strip(), 0) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': @@ -220,6 +208,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue + torrent_size = individual_torrent.find('td', {'class': 'table_size'}).get_text() + size = convert_size(torrent_size) or -1 + item = { 'title': title, 'link': download_url, @@ -235,14 +226,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) - continue + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue - results += items + results += items - return results + return results provider = FreshOnTVProvider() diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index e95b781cb2..fcfbfba75d 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -56,7 +56,8 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True @@ -81,7 +82,8 @@ def login(self): # You tried too often, please try again after 2 hours! if re.search('You tried too often', response): - logger.log('You tried too often, please try again after 2 hours! Disable IPTorrents for at least 2 hours', logger.WARNING) + logger.log('You tried too often, please try again after 2 hours!' + ' Disable IPTorrents for at least 2 hours', logger.WARNING) return False return True @@ -96,6 +98,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': From 5d96054682b9ce2e5008a1e5a3f9c93fb44e8ca3 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 19:00:53 +0200 Subject: [PATCH 048/134] More improvements and cleanups --- sickbeard/providers/hd4free.py | 3 ++- sickbeard/providers/newznab.py | 2 ++ sickbeard/providers/pretome.py | 1 + sickbeard/providers/scenetime.py | 1 + sickbeard/providers/t411.py | 1 + sickbeard/providers/thepiratebay.py | 1 + sickbeard/providers/tokyotoshokan.py | 1 + sickbeard/providers/transmitthenet.py | 3 ++- sickbeard/providers/zooqle.py | 2 +- 9 files changed, 12 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index 3cfa9e65c1..3a456f0f61 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -65,6 +65,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if self.freeleech: search_params['fl'] = 'true' @@ -95,7 +96,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if jdata['0']['total_results'] == 0: logger.log('Provider has no results for this search', logger.DEBUG) continue - except StandardError: + except KeyError: continue for i in jdata: diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 8365639725..792f69e18e 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -313,7 +313,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: + if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index 4b289f87cb..6aa5da0664 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -91,6 +91,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: 
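# Illustrative sketch (an aside, not part of the pretome diff around it): several of the
# provider fixes in this series guard BeautifulSoup lookups before dereferencing, because
# find() returns None when an element is missing, and chaining ['href'] or .get_text()
# on that None raises TypeError/AttributeError. Toy example, not Medusa code:
from bs4 import BeautifulSoup

snippet = BeautifulSoup('<td>no download link in this cell</td>', 'html.parser')

link = snippet.find('a', title='Download this torrent using magnet')
download_url = link['href'] if link else None  # guarded: stays None instead of raising
print(download_url)  # None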
items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 7ab271c546..35f99c3cfa 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -80,6 +80,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index c81601ee5d..767d57c107 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -92,6 +92,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: if mode != 'RSS': diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index c9e0498cb5..575f08a8ea 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -89,6 +89,7 @@ def process_column_header(th): logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + search_url = self.urls['search'] if mode != 'RSS' else self.urls['rss'] if self.custom_url: if not validators.url(self.custom_url): diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index bed2eb9351..234cab59c3 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -58,6 +58,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py index 6730ac8104..d329750538 100644 --- a/sickbeard/providers/transmitthenet.py +++ b/sickbeard/providers/transmitthenet.py @@ -62,7 +62,8 @@ def __init__(self): def _check_auth(self): if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing, check your config.'.format(self.name)) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py index fd69dc1240..ce8944ca7d 100644 --- a/sickbeard/providers/zooqle.py +++ b/sickbeard/providers/zooqle.py @@ -87,7 +87,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params = {'q': '{0} category:TV'.format(search_string)} response = self.get_url(self.urls['search'], params=search_params, returns='response') - if not response.text: + if not response or not response.text: logger.log('No data returned from provider', logger.DEBUG) continue From 956c756bf18df441bbc3279f8afb72468403b8da Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 20:05:10 +0200 Subject: [PATCH 049/134] Fix ThePirateBay --- sickbeard/providers/thepiratebay.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index 575f08a8ea..af7a316b9a 100644 --- 
a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -57,7 +57,7 @@ def __init__(self): # Proper Strings # Cache - self.cache = tvcache.TVCache(self, min_time=30) # only poll ThePirateBay every 30 minutes max + self.cache = tvcache.TVCache(self, min_time=1) # only poll ThePirateBay every 30 minutes max def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] @@ -126,9 +126,11 @@ def process_column_header(th): try: cells = result('td') - title = result.find(class_='detName').get_text(strip=True) - download_url = result.find(title='Download this torrent using magnet')['href'] + self._custom_trackers - if 'magnet:?' not in download_url: + title = result.find(class_='detName') + title = title.get_text(strip=True) if title else None + download_url = result.find(title='Download this torrent using magnet') + download_url = download_url['href'] + self._custom_trackers if download_url else None + if download_url and 'magnet:?' not in download_url: logger.log('Invalid ThePirateBay proxy please try another one', logger.DEBUG) continue if not all([title, download_url]): From 83c40c88bc1a576c989f16b25906c95e317be3a7 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 21:42:15 +0200 Subject: [PATCH 050/134] Add size to freshon, cleanup, fix for tvchaosuk --- sickbeard/providers/freshontv.py | 39 ++++++++++++++++++-------------- sickbeard/providers/tvchaosuk.py | 2 +- 2 files changed, 23 insertions(+), 18 deletions(-) diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 62bf13f452..c247ed6919 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -81,9 +81,9 @@ def login(self): return False if re.search('/logout.php', response): - try: - if dict_from_cookiejar(self.session.cookies)['uid'] and dict_from_cookiejar(self.session.cookies)['pass']: + if dict_from_cookiejar(self.session.cookies)['uid'] and \ + dict_from_cookiejar(self.session.cookies)['pass']: self._uid = dict_from_cookiejar(self.session.cookies)['uid'] self._hash = dict_from_cookiejar(self.session.cookies)['pass'] @@ -92,10 +92,12 @@ def login(self): return True except Exception: logger.log('Unable to login to provider (cookie)', logger.WARNING) - return False + return False else: - if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response): + if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response) or \ + re.search('Username or password is incorrect. If you have an account here please use the' + ' recovery system or try again.', response): logger.log('Invalid username or password. Check your settings', logger.WARNING) if re.search('DDoS protection by CloudFlare', response): @@ -117,8 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) search_url = self.urls['search'] % (freeleech, search_string) @@ -154,7 +155,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if max_page_number > 3 and mode == 'RSS': max_page_number = 3 except Exception: - logger.log('Failed parsing provider. Traceback: %s' % traceback.format_exc(), logger.ERROR) + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue data_response_list = [init_html] @@ -164,7 +166,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for i in range(1, max_page_number): time.sleep(1) - page_search_url = search_url + '&page=' + str(i) + page_search_url = search_url + '&page=' + unicode(i) # '.log('Search string: ' + page_search_url, logger.DEBUG) page_html = self.get_url(page_search_url, returns='text') @@ -176,7 +178,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for data_response in data_response_list: with BS4Parser(data_response, 'html5lib') as html: - torrent_rows = html('tr', {'class': re.compile('torrent_[0-9]*')}) + torrent_rows = html('tr', class_=re.compile('torrent_[0-9]*')) # Continue only if a Release is found if not torrent_rows: @@ -190,25 +192,29 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if individual_torrent.find('img', alt='Nuked') is not None: continue - title = individual_torrent.find('a', {'class': 'torrent_name_link'})['title'] - details_url = individual_torrent.find('a', {'class': 'torrent_name_link'})['href'] + title = individual_torrent.find('a', class_='torrent_name_link')['title'] + details_url = individual_torrent.find('a', class_='torrent_name_link')['href'] torrent_id = int((re.match('.*?([0-9]+)$', details_url).group(1)).strip()) - download_url = self.urls['download'] % (str(torrent_id)) + download_url = self.urls['download'] % (unicode(torrent_id)) if not all([title, download_url]): continue - seeders = try_int(individual_torrent.find('td', {'class': 'table_seeders'}).find('span').text.strip(), 1) - leechers = try_int(individual_torrent.find('td', {'class': 'table_leechers'}).find('a').text.strip(), 0) + seeders = try_int(individual_torrent.find('td', class_='table_seeders').find('span').get_text(strip=True), 1) + leechers = try_int(individual_torrent.find('td', class_='table_leechers').find('a').get_text(strip=True), 0) # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue - torrent_size = individual_torrent.find('td', {'class': 'table_size'}).get_text() + torrent_size = individual_torrent.find('td', class_='table_size').get_text(strip=True) + torrent_lenght = len(torrent_size) + torrent_weight = torrent_size[:torrent_lenght - 2] + torrent_unit = torrent_size[torrent_lenght - 2:] + torrent_size = '{0} {1}'.format(torrent_weight, torrent_unit) size = convert_size(torrent_size) or -1 item = { @@ -225,7 +231,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders, leechers), logger.DEBUG) items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. 
Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index 4890b7cdff..5b3751c856 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -166,7 +166,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man size = convert_size(torrent_size, units=units) or -1 item = { - 'title': title, + 'title': title + '.hdtv.x264', 'link': download_url, 'size': size, 'seeders': seeders, From a91faed8d7fadc94bef91ab452f28e5865106fc8 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sat, 11 Jun 2016 20:46:32 +0200 Subject: [PATCH 051/134] Fix for omgwtfnzb, needed a default value, cause getattr doesn't do that by default. --- sickbeard/search.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/search.py b/sickbeard/search.py index dc6d891985..a5e9c1f105 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -585,7 +585,7 @@ def searchProviders(show, episodes, forced_search=False, downCurQuality=False, m foundResults[cur_provider.name][curEp] = searchResults[curEp] # Sort the list by seeders if possible - if cur_provider.provider_type == 'torrent' or getattr(cur_provider, 'torznab'): + if cur_provider.provider_type == 'torrent' or getattr(cur_provider, 'torznab', None): foundResults[cur_provider.name][curEp].sort(key=lambda d: int(d.seeders), reverse=True) break From c3aca3f1583fe45b760c269dc1f03ad8521a9fd1 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 11 Jun 2016 23:47:01 +0200 Subject: [PATCH 052/134] Improve size parsing code Freshon --- sickbeard/providers/freshontv.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index c247ed6919..1bd2390bc7 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -211,10 +211,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man continue torrent_size = individual_torrent.find('td', class_='table_size').get_text(strip=True) - torrent_lenght = len(torrent_size) - torrent_weight = torrent_size[:torrent_lenght - 2] - torrent_unit = torrent_size[torrent_lenght - 2:] - torrent_size = '{0} {1}'.format(torrent_weight, torrent_unit) + torrent_size = re.split('(\d+.?\d+)', unicode(torrent_size), 1) + torrent_size = '{0} {1}'.format(torrent_size[1], torrent_size[2]) size = convert_size(torrent_size) or -1 item = { From 12eade7387bf90816d59378915e86b2ac976a2c9 Mon Sep 17 00:00:00 2001 From: medariox Date: Sun, 12 Jun 2016 14:03:26 +0200 Subject: [PATCH 053/134] Fixes for ExtraTorrent and HDTorrents --- sickbeard/providers/extratorrent.py | 5 ++++- sickbeard/providers/hdtorrents.py | 3 ++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index 9675e1e6a2..21e4305884 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -85,7 +85,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man download_url = enclosure['url'] if enclosure else item.find('link').next.strip() download_url = re.sub(r'(.*)/torrent/(.*).html', r'\1/download/\2.torrent', download_url) else: - info_hash = item.find('info_hash').get_text(strip=True) + info_hash = item.find('info_hash') + if not info_hash: + continue + info_hash = info_hash.get_text(strip=True) download_url = 'magnet:?xt=urn:btih:' + info_hash + '&dn=' + 
title + self._custom_trackers if not all([title, download_url]): diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index e784d5ba22..f6c72834ee 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -149,7 +149,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man title = cells[labels.index('Filename')].a title = title.get_text(strip=True) if title else None - download_url = self.url + '/' + cells[labels.index('Dl')].a['href'] + download_url = self.url + '/' + cells[labels.index('Dl')].a + download_url = download_url.get('href') if download_url else None if not all([title, download_url]): continue From 4ca3d606b786695b6327f072591f7910abc4ef2d Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 12 Jun 2016 16:49:59 +0200 Subject: [PATCH 054/134] Fixed bithdtv * For when it's not getting back the 750px tables. --- sickbeard/providers/bithdtv.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py index ac3245528e..78a4444d47 100644 --- a/sickbeard/providers/bithdtv.py +++ b/sickbeard/providers/bithdtv.py @@ -98,8 +98,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Need the html.parser, as the html5parser has issues with this site. with BS4Parser(response.text, 'html.parser') as html: - torrent_table = html('table', width='750')[-1] # Get the last table with a width of 750px. - torrent_rows = torrent_table('tr') if torrent_table else [] + all_tables = html('table', width='750') # Get the last table with a width of 750px. + if all_tables: + result_table = all_tables[-1] + else: + continue + + torrent_rows = result_table('tr') if result_table else [] # Continue only if at least one Release is found if len(torrent_rows) < 2: From 267e3594cdd5ca6c7fb4c78f2b3349f46ab09e32 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 13 Jun 2016 09:28:26 +0200 Subject: [PATCH 055/134] Fix tokyotoshokan provider errors --- sickbeard/providers/tokyotoshokan.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index 234cab59c3..c8127ef2c2 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -86,8 +86,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for top, bot in zip(torrent_rows[a::2], torrent_rows[a + 1::2]): try: desc_top = top.find('td', class_='desc-top') - title = desc_top.get_text(strip=True) - download_url = desc_top.find('a')['href'] + title = desc_top.get_text(strip=True) if desc_top else None + download_url = desc_top.find('a')['href'] if desc_top else None if not all([title, download_url]): continue From 4097f9b86daab4f827f49700674ac9c3abe7a956 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 13 Jun 2016 15:33:26 +0200 Subject: [PATCH 056/134] Fixed properSearch. 
* listPropers does an sql, but accessing the row, requires it to use b'' --- sickrage/providers/GenericProvider.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 71b57b8938..359f17694b 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -119,9 +119,12 @@ def download_result(self, result): def find_propers(self, search_date=None): results = self.cache.listPropers(search_date) - return [Proper(x['name'], x['url'], datetime.fromtimestamp(x['time']), self.show, x['seeders'], x['leechers'], x['size'], x['pubdate'], x['hash']) for x in results] + return [Proper(x[b'name'], x[b'url'], datetime.fromtimestamp(x[b'time']), self.show, x[b'seeders'], + x[b'leechers'], x[b'size'], x[b'pubdate'], x[b'hash']) for x in results] - def find_search_results(self, show, episodes, search_mode, forced_search=False, download_current_quality=False, manual_search=False, manual_search_type='episode'): # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements + def find_search_results(self, show, episodes, search_mode, forced_search=False, + download_current_quality=False, manual_search=False, + manual_search_type='episode'): # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements self._check_auth() self.show = show From bb656577d90d27dce36e86888a8082b6c395b9ba Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 13 Jun 2016 15:34:57 +0200 Subject: [PATCH 057/134] Added newznab search by search_query fallback, when search by tvdbid does not give back results. --- sickbeard/providers/newznab.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 792f69e18e..23a05559dd 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -69,6 +69,7 @@ def __init__(self, name, url, key='0', catIDs='5030,5040', search_mode='eponly', self.caps = False self.cap_tv_search = None + self.force_query = False # self.cap_search = None # self.cap_movie_search = None # self.cap_audio_search = None @@ -286,7 +287,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: self.torznab = False search_params = { - 't': 'tvsearch' if 'tvdbid' in str(self.cap_tv_search) else 'search', + 't': 'tvsearch' if 'tvdbid' in str(self.cap_tv_search) and not self.force_query else 'search', 'limit': 100, 'offset': 0, 'cat': self.catIDs.strip(', ') or '5030,5040', @@ -397,6 +398,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man results += items + # Reproces but now use force_query = True + if not results and not self.force_query: + self.force_query = True + return self.search(search_strings, ep_obj=ep_obj) + return results def _get_size(self, item): From 0fac929812282c452d1e0061cbe258505b983287 Mon Sep 17 00:00:00 2001 From: medariox Date: Tue, 14 Jun 2016 19:28:30 +0200 Subject: [PATCH 058/134] Fix HDTorrents, use urljoin, partial rewrite --- sickbeard/providers/hdtorrents.py | 88 +++++++++++++------------------ 1 file changed, 38 insertions(+), 50 deletions(-) diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index f6c72834ee..2da56bb428 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -1,5 +1,5 @@ # coding=utf-8 -# Author: Dustyn Gibson +# Orginal author: Dustyn 
Gibson # # This file is part of Medusa. # @@ -21,7 +21,7 @@ import re import traceback -from requests.compat import quote_plus +from requests.compat import urljoin from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -39,25 +39,22 @@ def __init__(self): self.username = None self.password = None + self.minseed = None self.minleech = None self.freeleech = None - self.urls = {'base_url': 'https://hd-torrents.org', - 'login': 'https://hd-torrents.org/login.php', - 'search': 'https://hd-torrents.org/torrents.php?search=%s&active=1&options=0%s', - 'rss': 'https://hd-torrents.org/torrents.php?search=&active=1&options=0%s', - 'home': 'https://hd-torrents.org/%s'} - - self.url = self.urls['base_url'] + self.url = 'https://hd-torrents.org/' + self.urls = { + 'login': urljoin(self.url, 'login.php'), + 'search': urljoin(self.url, 'torrents.php'), + } - self.categories = '&category[]=59&category[]=60&category[]=30&category[]=38' self.proper_strings = ['PROPER', 'REPACK'] self.cache = tvcache.TVCache(self, min_time=30) # only poll HDTorrents every 30 minutes max def _check_auth(self): - if not self.username or not self.password: logger.log('Invalid username or password. Check your settings', logger.WARNING) @@ -67,9 +64,11 @@ def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True - login_params = {'uid': self.username, - 'pwd': self.password, - 'submit': 'Confirm'} + login_params = { + 'uid': self.username, + 'pwd': self.password, + 'submit': 'Confirm' + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -87,6 +86,18 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not self.login(): return results + # Search Params + search_params = { + 'search': '', # BROWSE + 'active': 1, # TV/XVID + 'options': 0, # TV/X264 + 'category[]': 59, # TV/DVDRIP + 'category[]': 60, # TV/BLURAY + 'category[]': 30, # TV/DVDR + 'category[]': 38, # TV/SD + 'category[]': 65, + } + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) @@ -94,47 +105,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - search_url = self.urls['search'] % (quote_plus(search_string), self.categories) - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) - else: - search_url = self.urls['rss'] % self.categories + search_params['search'] = search_string + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) if self.freeleech: - search_url = search_url.replace('active=1', 'active=5') + search_params['active'] = 5 - data = self.get_url(search_url, returns='text') - if not data or 'please try later' in data: + response = self.get_url(self.urls['search'], params=search_params, returns='response') + if not response or not response.text: logger.log('No data returned from provider', logger.DEBUG) continue - if data.find('No torrents here') != -1: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue - - # Search result page contains some invalid html that prevents html parser from returning all data. - # We cut everything before the table that contains the data we are interested in thus eliminating - # the invalid html portions - try: - index = data.lower().index('
Date: Tue, 14 Jun 2016 19:49:02 +0200 Subject: [PATCH 059/134] Fix rare Zooqle error --- sickbeard/providers/zooqle.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py index ce8944ca7d..164e750d44 100644 --- a/sickbeard/providers/zooqle.py +++ b/sickbeard/providers/zooqle.py @@ -113,9 +113,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not all([title, download_url]): continue - peers = cells[6].find('div')['title'].replace(',', '').split(' | ', 1) - seeders = try_int(peers[0].strip('Seeders: ')) - leechers = try_int(peers[1].strip('Leechers: ')) + seeders = 1 + leechers = 0 + peers = cells[6].find('div') + if peers and peers.get('title'): + peers = peers['title'].replace(',', '').split(' | ', 1) + seeders = try_int(peers[0].strip('Seeders: ')) + leechers = try_int(peers[1].strip('Leechers: ')) # Filter unseeded torrent if seeders < min(self.minseed, 1): From 649ace222f2dd81b934b6bd71c25e25e42692dca Mon Sep 17 00:00:00 2001 From: medariox Date: Tue, 14 Jun 2016 21:35:03 +0200 Subject: [PATCH 060/134] Improve HDTorrents, bring back ugly hack --- sickbeard/providers/hdtorrents.py | 36 ++++++++++++++++++++----------- 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index 2da56bb428..ed68281739 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -28,6 +28,7 @@ from sickbeard.bs4_parser import BS4Parser from sickrage.helper.common import convert_size, try_int +from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -50,13 +51,15 @@ def __init__(self): 'search': urljoin(self.url, 'torrents.php'), } - self.proper_strings = ['PROPER', 'REPACK'] + self.proper_strings = ['PROPER', 'REPACK', 'REAL'] - self.cache = tvcache.TVCache(self, min_time=30) # only poll HDTorrents every 30 minutes max + self.cache = tvcache.TVCache(self, min_time=30) def _check_auth(self): + if not self.username or not self.password: - logger.log('Invalid username or password. Check your settings', logger.WARNING) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) return True @@ -88,14 +91,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Search Params search_params = { - 'search': '', # BROWSE - 'active': 1, # TV/XVID - 'options': 0, # TV/X264 - 'category[]': 59, # TV/DVDRIP - 'category[]': 60, # TV/BLURAY - 'category[]': 30, # TV/DVDR - 'category[]': 38, # TV/SD - 'category[]': 65, + 'search': '', + 'active': 1, + 'options': 0, + 'category[0]': 59, + 'category[1]': 60, + 'category[2]': 30, + 'category[3]': 38, + 'category[4]': 65, } for mode in search_strings: @@ -116,7 +119,16 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('No data returned from provider', logger.DEBUG) continue - with BS4Parser(response.text, 'html5lib') as html: + # Search result page contains some invalid html that prevents html parser from returning all data. + # We cut everything before the table that contains the data we are interested in thus eliminating + # the invalid html portions + try: + index = response.text.index('
Date: Thu, 16 Jun 2016 23:53:39 +0200 Subject: [PATCH 061/134] Improve TNTVillage, fix daily search, much more --- sickbeard/providers/tntvillage.py | 388 +++++++++--------------------- 1 file changed, 108 insertions(+), 280 deletions(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index f258432f00..96578afe81 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -1,5 +1,5 @@ # coding=utf-8 -# Author: Giovanni Borri +# Original author: Giovanni Borri # Modified by gborri, https://github.com/gborri for TNTVillage # # This file is part of Medusa. @@ -22,43 +22,18 @@ import re import traceback +from urlparse import parse_qs + from requests.utils import dict_from_cookiejar +from requests.compat import urljoin -from sickbeard import db, logger, tvcache +from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser -from sickbeard.common import Quality -from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException -from sickrage.helper.common import convert_size +from sickrage.helper.common import convert_size, try_int from sickrage.helper.exceptions import AuthException from sickrage.providers.torrent.TorrentProvider import TorrentProvider -category_excluded = {'Sport': 22, - 'Teatro': 23, - 'Video Musicali': 21, - 'Film': 4, - 'Musica': 2, - 'Students Releases': 13, - 'E Books': 3, - 'Linux': 6, - 'Macintosh': 9, - 'Windows Software': 10, - 'Pc Game': 11, - 'Playstation 2': 12, - 'Wrestling': 24, - 'Varie': 25, - 'Xbox': 26, - 'Immagini sfondi': 27, - 'Altri Giochi': 28, - 'Fumetteria': 30, - 'Trash': 31, - 'PlayStation 1': 32, - 'PSP Portable': 33, - 'A Book': 34, - 'Podcast': 35, - 'Edicola': 36, - 'Mobile': 37} - class TNTVillageProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes @@ -68,52 +43,25 @@ def __init__(self): self._uid = None self._hash = None + self.username = None self.password = None - self.cat = None - self.engrelease = None - self.page = 10 - self.subtitle = None + self.minseed = None self.minleech = None - self.hdtext = [' - Versione 720p', - ' Versione 720p', - ' V 720p', - ' V 720', - ' V HEVC', - ' V HEVC', - ' V 1080', - ' Versione 1080p', - ' 720p HEVC', - ' Ver 720', - ' 720p HEVC', - ' 720p'] - - self.category_dict = {'Serie TV': 29, - 'Cartoni': 8, - 'Anime': 7, - 'Programmi e Film TV': 1, - 'Documentari': 14, - 'All': 0} - - self.urls = {'base_url': 'http://forum.tntvillage.scambioetico.org', - 'login': 'http://forum.tntvillage.scambioetico.org/index.php?act=Login&CODE=01', - 'detail': 'http://forum.tntvillage.scambioetico.org/index.php?showtopic=%s', - 'search': 'http://forum.tntvillage.scambioetico.org/?act=allreleases&%s', - 'search_page': 'http://forum.tntvillage.scambioetico.org/?act=allreleases&st={0}&{1}', - 'download': 'http://forum.tntvillage.scambioetico.org/index.php?act=Attach&type=post&id=%s'} - - self.url = self.urls['base_url'] - - self.sub_string = ['sub', 'softsub'] + self.url = 'http://forum.tntvillage.scambioetico.org/' + self.urls = { + 'login': urljoin(self.url, 'index.php?act=Login&CODE=01'), + 'download': urljoin(self.url, 'index.php?act=Attach&type=post&id={0}'), + } self.proper_strings = ['PROPER', 'REPACK'] - self.categories = 'cat=29' - self.cache = tvcache.TVCache(self, min_time=30) # only poll TNTVillage every 30 minutes max + self.subtitle = None + def _check_auth(self): if not self.username or not self.password: @@ -128,10 +76,12 @@ def login(self): if cookies_dict['pass_hash'] != '0' and 
cookies_dict['member_id'] != '0': return True - login_params = {'UserName': self.username, - 'PassWord': self.password, - 'CookieDate': 1, - 'submit': 'Connettiti al Forum'} + login_params = { + 'UserName': self.username, + 'PassWord': self.password, + 'CookieDate': 1, + 'submit': 'Connettiti al Forum' + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -145,209 +95,63 @@ def login(self): return True - @staticmethod - def _reverseQuality(quality): - - quality_string = '' - - if quality == Quality.SDTV: - quality_string = ' HDTV x264' - if quality == Quality.SDDVD: - quality_string = ' DVDRIP' - elif quality == Quality.HDTV: - quality_string = ' 720p HDTV x264' - elif quality == Quality.FULLHDTV: - quality_string = ' 1080p HDTV x264' - elif quality == Quality.RAWHDTV: - quality_string = ' 1080i HDTV mpeg2' - elif quality == Quality.HDWEBDL: - quality_string = ' 720p WEB-DL h264' - elif quality == Quality.FULLHDWEBDL: - quality_string = ' 1080p WEB-DL h264' - elif quality == Quality.HDBLURAY: - quality_string = ' 720p Bluray x264' - elif quality == Quality.FULLHDBLURAY: - quality_string = ' 1080p Bluray x264' - - return quality_string - - @staticmethod - def _episodeQuality(torrent_rows): # pylint: disable=too-many-return-statements, too-many-branches - """ - Return The quality from the scene episode HTML row. - """ - file_quality = '' - - img_all = (torrent_rows('td'))[1]('img') - - if img_all: - for img_type in img_all: - try: - file_quality = file_quality + ' ' + img_type['src'].replace('style_images/mkportal-636/', '') - file_quality = file_quality.replace('.gif', '').replace('.png', '') - except Exception: - logger.log('Failed parsing quality. Traceback: %s' % traceback.format_exc(), logger.ERROR) - - else: - file_quality = (torrent_rows('td'))[1].get_text() - logger.log('Episode quality: %s' % file_quality, logger.DEBUG) - - def checkName(options, func): - return func([re.search(option, file_quality, re.I) for option in options]) - - dvdOptions = checkName(['dvd', 'dvdrip', 'dvdmux', 'DVD9', 'DVD5'], any) - bluRayOptions = checkName(['BD', 'BDmux', 'BDrip', 'BRrip', 'Bluray'], any) - sdOptions = checkName(['h264', 'divx', 'XviD', 'tv', 'TVrip', 'SATRip', 'DTTrip', 'Mpeg2'], any) - hdOptions = checkName(['720p'], any) - fullHD = checkName(['1080p', 'fullHD'], any) - - if img_all: - file_quality = (torrent_rows('td'))[1].get_text() - - webdl = checkName(['webdl', 'webmux', 'webrip', 'dl-webmux', 'web-dlmux', - 'webdl-mux', 'web-dl', 'webdlmux', 'dlmux'], any) - - if sdOptions and not dvdOptions and not fullHD and not hdOptions: - return Quality.SDTV - elif dvdOptions: - return Quality.SDDVD - elif hdOptions and not bluRayOptions and not fullHD and not webdl: - return Quality.HDTV - elif not hdOptions and not bluRayOptions and fullHD and not webdl: - return Quality.FULLHDTV - elif hdOptions and not bluRayOptions and not fullHD and webdl: - return Quality.HDWEBDL - elif not hdOptions and not bluRayOptions and fullHD and webdl: - return Quality.FULLHDWEBDL - elif bluRayOptions and hdOptions and not fullHD: - return Quality.HDBLURAY - elif bluRayOptions and fullHD and not hdOptions: - return Quality.FULLHDBLURAY - else: - return Quality.UNKNOWN - - def _is_italian(self, torrent_rows): - - name = str(torrent_rows('td')[1].find('b').find('span')) - if not name or name == 'None': - return False - - sub_found = italian = False - for sub in self.sub_string: - if re.search(sub, name, re.I): - sub_found = True - else: - continue - - if 
re.search('ita', name.split(sub)[0], re.I): - logger.log('Found Italian release: ' + name, logger.DEBUG) - italian = True - break - - if not sub_found and re.search('ita', name, re.I): - logger.log('Found Italian release: ' + name, logger.DEBUG) - italian = True - - return italian - - @staticmethod - def _is_english(torrent_rows): - - name = str(torrent_rows('td')[1].find('b').find('span')) - if not name or name == 'None': - return False - - english = False - if re.search('eng', name, re.I): - logger.log('Found English release: ' + name, logger.DEBUG) - english = True - - return english - - @staticmethod - def _is_season_pack(name): - - try: - parse_result = NameParser(tryIndexers=True).parse(name) - except (InvalidNameException, InvalidShowException) as error: - logger.log('{0}'.format(error), logger.DEBUG) - return False - - main_db_con = db.DBConnection() - sql_selection = 'select count(*) as count from tv_episodes where showid = ? and season = ?' - episodes = main_db_con.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number]) - if int(episodes[0][b'count']) == len(parse_result.episode_numbers): - return True - - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): results = [] if not self.login(): return results - self.categories = 'cat=' + str(self.cat) + search_params = { + 'act': 'allreleases', + 'filter': '', + } for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) - for search_string in search_strings[mode]: - - if mode == 'RSS': - self.page = 2 - - last_page = 0 - y = int(self.page) - - if search_string == '': - continue - search_string = str(search_string).replace('.', ' ') - - for x in range(0, y): - z = x * 20 - if last_page: - break + for search_string in search_strings[mode]: if mode != 'RSS': - search_url = (self.urls['search_page'] + '&filter={2}').format(z, self.categories, search_string) + search_params['filter'] = search_string + logger.log('Search string: {0}'.format(search_string), logger.DEBUG) else: - search_url = self.urls['search_page'].format(z, self.categories) - - if mode != 'RSS': - logger.log('Search string: {0}'.format - (search_string), logger.DEBUG) + search_params['cat'] = 29 - data = self.get_url(search_url, returns='text') - if not data: + response = self.get_url(self.url, params=search_params, returns='response') + if not response or not response.text: logger.log('No data returned from provider', logger.DEBUG) continue - with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find('table', attrs={'class': 'copyright'}) + with BS4Parser(response.text, 'html5lib') as html: + torrent_table = html.find('table', class_='copyright') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if one Release is found + # Continue only if one release is found if len(torrent_rows) < 3: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - last_page = 1 continue - if len(torrent_rows) < 42: - last_page = 1 - - for result in torrent_table('tr')[2:]: + for result in torrent_table('tr')[1:]: try: - link = result.find('td').find('a') - title = link.string if link else None - dl_link = result('td') - dl_url = dl_link[8].find('a')['href'][-8:] if len(dl_link) > 7 else None - download_url = self.urls['download'] % dl_url if dl_url else None + cells = result('td') + if not cells: + continue + + last_cell_anchor = 
cells[-1].find('a') + if not last_cell_anchor: + continue + params = parse_qs(last_cell_anchor.get('href', '')) + download_url = self.urls['download'].format(params['pid'][0]) if \ + params.get('pid') else None + title = _normalize_title(cells[0], cells[1], mode) if not all([title, download_url]): continue - leechers = result('td')[3]('td')[1].text - leechers = int(leechers.strip('[]')) - seeders = result('td')[3]('td')[2].text - seeders = int(seeders.strip('[]')) + info_cell = cells[3].find_all('td') + leechers = info_cell[0].find('span').get_text(strip=True) + leechers = try_int(leechers) + seeders = info_cell[1].find('span').get_text() + seeders = try_int(seeders, 1) # Filter unseeded torrent if seeders < min(self.minseed, 1): @@ -357,38 +161,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue - filename_qt = self._reverseQuality(self._episodeQuality(result)) - for text in self.hdtext: - title1 = title - title = title.replace(text, filename_qt) - if title != title1: - break - - if Quality.nameQuality(title) == Quality.UNKNOWN: - title += filename_qt - - if not self._is_italian(result) and not self.subtitle: - logger.log('Torrent is subtitled, skipping: %s ' % title, logger.DEBUG) + if _has_only_subs(title) and not self.subtitle: + logger.log('Torrent is only subtitled, skipping: {0}'.format + (title), logger.DEBUG) continue - if self.engrelease and not self._is_english(result): - logger.log('Torrent isnt english audio/subtitled, skipping: %s ' % title, logger.DEBUG) - continue - - search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0] - show_title = search_show - rindex = re.search(r'([Ss][\d{1,2}]+)', title) - if rindex: - show_title = title[:rindex.start()] - ep_params = title[rindex.start():] - if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower(): - new_title = search_show + ep_params - title = new_title - - if self._is_season_pack(title): - title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title) - - torrent_size = result('td')[3]('td')[3].text.strip('[]') + ' GB' + torrent_size = info_cell[3].find('span').get_text() + ' GB' size = convert_size(torrent_size) or -1 item = { @@ -410,9 +188,59 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (traceback.format_exc()), logger.ERROR) continue - results += items + results += items + + return results + + +def _normalize_title(title, info, mode): + + result_title = title.find('a').get_text() + result_info = info.find('span') + + if not result_info: + return None + + bad_words = ['[cura]', 'hot', 'season', 'stagione', 'series', 'premiere', 'finale', 'fine', + 'full', 'Completa', 'supereroi', 'commedia', 'drammatico', 'poliziesco', 'azione', + 'giallo', 'politico', 'sitcom', 'funzionante'] + + formatted_info = '' + for info_part in result_info: + if mode == 'RSS': + try: + info_part = info_part.get('src') + info_part = info_part.replace('style_images/mkportal-636/', '') + info_part = info_part.replace('.gif', '').replace('.png', '') + if info_part == 'dolby': + info_part = 'Ac3' + elif info_part == 'fullHd': + info_part = '1080p' + except AttributeError: + info_part = info_part.replace('·', '').replace(',', '') + info_part = info_part.replace('by', '-').strip() + formatted_info += ' ' + info_part + else: + formatted_info = info_part + + allowed_words = [word for word in formatted_info.split() if word.lower() not in bad_words] + final_title = '{0} '.format(result_title) + ' 
'.join(allowed_words).strip('-').strip() + + return final_title + - return results +def _has_only_subs(title): + + title = title.lower() + + if 'sub' in title: + title = title.split() + counter = 0 + for word in title: + if 'ita' in word: + counter = counter + 1 + if counter < 2: + return True provider = TNTVillageProvider() From 71cb481a79c00152d5d23c2a510f120eed976023 Mon Sep 17 00:00:00 2001 From: labrys Date: Fri, 17 Jun 2016 00:58:11 -0400 Subject: [PATCH 062/134] Fix BIT-HDTV --- sickbeard/providers/bithdtv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py index 78a4444d47..f59e453425 100644 --- a/sickbeard/providers/bithdtv.py +++ b/sickbeard/providers/bithdtv.py @@ -152,7 +152,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Found result: {0} with {1} seeders and {2} leechers'.format (title, seeders, leechers), logger.DEBUG) - items.append(item) + items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) From ffd321228421d363561f2f6e5b2bb7ab6f662455 Mon Sep 17 00:00:00 2001 From: labrys Date: Fri, 17 Jun 2016 00:56:16 -0400 Subject: [PATCH 063/134] More standardization --- sickbeard/providers/abnormal.py | 52 ++++++---- sickbeard/providers/alpharatio.py | 27 ++++-- sickbeard/providers/anizb.py | 20 ++-- sickbeard/providers/binsearch.py | 18 +++- sickbeard/providers/bitcannon.py | 48 ++++++--- sickbeard/providers/bithdtv.py | 9 +- sickbeard/providers/bitsnoop.py | 48 ++++++--- sickbeard/providers/bluetigers.py | 140 ++++++++++++++++----------- sickbeard/providers/btdigg.py | 34 ++++--- sickbeard/providers/btn.py | 53 ++++++---- sickbeard/providers/cpasbien.py | 34 +++++-- sickbeard/providers/danishbits.py | 37 ++++--- sickbeard/providers/elitetorrent.py | 91 +++++++++-------- sickbeard/providers/extratorrent.py | 62 ++++++++---- sickbeard/providers/freshontv.py | 57 +++++++---- sickbeard/providers/gftracker.py | 40 +++++--- sickbeard/providers/hd4free.py | 56 ++++++++--- sickbeard/providers/hdbits.py | 18 +++- sickbeard/providers/hdspace.py | 60 ++++++++---- sickbeard/providers/hdtorrents.py | 43 +++++--- sickbeard/providers/hounddawgs.py | 87 ++++++++++------- sickbeard/providers/ilovetorrents.py | 58 ++++++----- sickbeard/providers/iptorrents.py | 63 ++++++------ sickbeard/providers/kat.py | 26 +++-- sickbeard/providers/limetorrents.py | 30 +++--- sickbeard/providers/morethantv.py | 43 +++++--- sickbeard/providers/newpct.py | 71 ++++++++------ sickbeard/providers/newznab.py | 9 +- sickbeard/providers/norbits.py | 40 +++++--- sickbeard/providers/nyaatorrents.py | 43 +++++--- 30 files changed, 915 insertions(+), 502 deletions(-) diff --git a/sickbeard/providers/abnormal.py b/sickbeard/providers/abnormal.py index 35d7c55229..aed5692913 100644 --- a/sickbeard/providers/abnormal.py +++ b/sickbeard/providers/abnormal.py @@ -32,7 +32,7 @@ class ABNormalProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """ABNormal Torrent provider""" def __init__(self): # Provider Init @@ -42,10 +42,6 @@ def __init__(self): self.username = None self.password = None - # Torrent Stats - self.minseed = None - self.minleech = None - # URLs self.url = 'https://abnormal.ws' self.urls = { @@ -56,6 +52,12 @@ def __init__(self): # Proper Strings self.proper_strings = ['PROPER'] + # Miscellaneous Options + + # Torrent Stats + 
self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self, min_time=30) @@ -80,16 +82,32 @@ def login(self): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + ABNormal search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results # Search Params search_params = { - 'cat[]': ['TV|SD|VOSTFR', 'TV|HD|VOSTFR', 'TV|SD|VF', 'TV|HD|VF', - 'TV|PACK|FR', 'TV|PACK|VOSTFR', 'TV|EMISSIONS', 'ANIME'], - # Both ASC and DESC are available for sort direction - 'way': 'DESC' + 'cat[]': [ + 'TV|SD|VOSTFR', + 'TV|HD|VOSTFR', + 'TV|SD|VF', + 'TV|HD|VF', + 'TV|PACK|FR', + 'TV|PACK|VOSTFR', + 'TV|EMISSIONS', + 'ANIME', + ], + 'order': 'Time', # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size + 'way': 'DESC', # Both ASC and DESC are available for sort direction } # Units @@ -97,19 +115,19 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) + search_params['order'] = 'Seeders' - # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size - search_params['order'] = ('Seeders', 'Time')[mode == 'RSS'] search_params['search'] = re.sub(r'[()]', '', search_string) data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -143,8 +161,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - 'minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -159,7 +177,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index a66ed392a9..5be2d498fc 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -32,7 +32,7 @@ class AlphaRatioProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """AlphaRatio Torrent provider""" def __init__(self): # Provider Init @@ -42,10 +42,6 @@ def __init__(self): self.username = None self.password = None - # Torrent Stats - self.minseed = None - self.minleech = None - # URLs self.url = 'http://alpharatio.cc' self.urls = { @@ -56,6 +52,12 @@ def __init__(self): # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + # Torrent Stats + self.minseed = None + self.minleech = None + + # Miscellaneous Options + # Cache self.cache = tvcache.TVCache(self) @@ -83,6 +85,14 @@ def login(self): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + AlphaRatio search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -110,9 +120,10 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': logger.log('Search string: {search}'.format (search=search_string), logger.DEBUG) @@ -155,8 +166,8 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py index 2eb4a3f133..a97efe0221 100644 --- a/sickbeard/providers/anizb.py +++ b/sickbeard/providers/anizb.py @@ -37,17 +37,24 @@ def __init__(self): # Provider Init NZBProvider.__init__(self, 'Anizb') + # Credentials self.public = True - self.supports_absolute_numbering = True - self.anime_only = True + # URLs self.url = 'https://anizb.org/' self.urls = { 'rss': self.url, 'api': urljoin(self.url, 'api/?q=') } + # Proper Strings + # Miscellaneous Options + self.supports_absolute_numbering = True + self.anime_only = True + + # Torrent Stats + # Cache self.cache = tvcache.TVCache(self) @@ -58,21 +65,20 @@ def _get_size(self, item): def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals """Start searching for anime using the provided search_strings. 
Used for backlog and daily""" results = [] - if self.show and not self.show.is_anime: return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = (self.urls['rss'], self.urls['api'] + search_string)[mode != 'RSS'] - data = self.get_url(search_url, returns='text') if not data: logger.log('No data returned from provider', logger.DEBUG) @@ -101,7 +107,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man item = { 'title': title, 'link': download_url, - 'size': size + 'size': size, } items.append(item) diff --git a/sickbeard/providers/binsearch.py b/sickbeard/providers/binsearch.py index 26135267b6..e63ff6cbd2 100644 --- a/sickbeard/providers/binsearch.py +++ b/sickbeard/providers/binsearch.py @@ -28,17 +28,27 @@ class BinSearchProvider(NZBProvider): - + """BinSearch Newznab provider""" def __init__(self): + # Provider Init NZBProvider.__init__(self, 'BinSearch') - self.url = 'https://www.binsearch.info' - self.urls = {'rss': urljoin(self.url, 'rss.php')} - + # Credentials self.public = True self.supports_backlog = False + # URLs + self.url = 'https://www.binsearch.info' + self.urls = { + 'rss': urljoin(self.url, 'rss.php') + } + + # Proper Strings + + # Miscellaneous Options + + # Cache self.cache = BinSearchCache(self, min_time=30) # only poll Binsearch every 30 minutes max diff --git a/sickbeard/providers/bitcannon.py b/sickbeard/providers/bitcannon.py index a7abf41ae2..de351f34cb 100644 --- a/sickbeard/providers/bitcannon.py +++ b/sickbeard/providers/bitcannon.py @@ -30,21 +30,39 @@ class BitCannonProvider(TorrentProvider): - + """BitCannon Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'BitCannon') + # Credentials + self.api_key = None + + # URLs + self.custom_url = None + + # Proper Strings + + # Miscellaneous Options + + # Torrent Stats self.minseed = None self.minleech = None - self.custom_url = None - self.api_key = None + # Cache self.cache = tvcache.TVCache(self, search_params={'RSS': ['tv', 'anime']}) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals + """ + BitCannon search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] - url = 'http://localhost:3000/' if self.custom_url: if not validators.url(self.custom_url, require_tld=False): @@ -52,21 +70,21 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results url = self.custom_url - search_params = {} - - anime = ep_obj and ep_obj.show and ep_obj.show.anime - search_params['category'] = ('tv', 'anime')[bool(anime)] - - if self.api_key: - search_params['apiKey'] = self.api_key + # Search Params + search_params = { + 'category': 'anime' if ep_obj and ep_obj.show and ep_obj.show.anime else 'tv', + 'apiKey': self.api_key + } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: search_params['q'] = search_string if mode != 'RSS': - 
logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = urljoin(url, 'api/search') parsed_json = self.get_url(search_url, params=search_params, returns='json') @@ -93,10 +111,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man else: seeders = leechers = 0 + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - 'minimum seeders: {0}. Seeders: {1})'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -119,6 +138,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man except (AttributeError, TypeError, KeyError, ValueError, IndexError): logger.log('Failed parsing provider. Traceback: {0!r}'.format (traceback.format_exc()), logger.ERROR) + continue results += items diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py index f59e453425..8348413c76 100644 --- a/sickbeard/providers/bithdtv.py +++ b/sickbeard/providers/bithdtv.py @@ -87,6 +87,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if mode != 'RSS': search_params['search'] = search_string + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) if mode == 'Season': search_params['cat'] = 12 @@ -96,8 +98,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('No data returned from provider', logger.DEBUG) continue - # Need the html.parser, as the html5parser has issues with this site. - with BS4Parser(response.text, 'html.parser') as html: + with BS4Parser(response.text, 'html.parser') as html: # Use html.parser, since html5parser has issues with this site. all_tables = html('table', width='750') # Get the last table with a width of 750px. if all_tables: result_table = all_tables[-1] @@ -131,8 +132,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/bitsnoop.py b/sickbeard/providers/bitsnoop.py index bbddffe8a1..f404d993ec 100644 --- a/sickbeard/providers/bitsnoop.py +++ b/sickbeard/providers/bitsnoop.py @@ -19,8 +19,10 @@ from __future__ import unicode_literals import traceback -import sickbeard +from requests.compat import urljoin + +import sickbeard from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -29,41 +31,57 @@ class BitSnoopProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """BitSnoop Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'BitSnoop') + # Credentials + self.public = True + + # URLs + self.url = 'http://bitsnoop.com' self.urls = { - 'index': 'http://bitsnoop.com', - 'search': 'http://bitsnoop.com/search/video/', - 'rss': 'http://bitsnoop.com/new_video.html?fmt=rss' + 'index': self.url, + 'search': urljoin(self.url, '/search/video/'), + 'rss': urljoin(self.url, '/new_video.html?fmt=rss'), } - self.url = self.urls['index'] + # Proper Strings + self.proper_strings = ['PROPER', 'REPACK'] - self.public = True + # Miscellaneous Options + + # Torrent Stats self.minseed = None self.minleech = None - self.proper_strings = ['PROPER', 'REPACK'] - + # Cache self.cache = tvcache.TVCache(self, search_params={'RSS': ['rss']}) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches,too-many-locals + """ + BitSnoop search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = (self.urls['rss'], self.urls['search'] + search_string + '/s/d/1/?fmt=rss')[mode != 'RSS'] - data = self.get_url(search_url, returns='text') if not data: logger.log('No data returned from provider', logger.DEBUG) @@ -98,8 +116,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -114,7 +132,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': info_hash + 'hash': info_hash, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py index d7adffb050..8a0808fa83 100644 --- a/sickbeard/providers/bluetigers.py +++ b/sickbeard/providers/bluetigers.py @@ -21,6 +21,7 @@ import re import traceback +from requests.compat import urljoin from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -30,29 +31,36 @@ class BlueTigersProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """BlueTigers Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'BLUETIGERS') + # Credentials self.username = None self.password = None self.token = None - self.cache = tvcache.TVCache(self, min_time=10) # Only poll BLUETIGERS every 10 minutes max - + # URLs + self.url = 'https://www.bluetigers.ca/' self.urls = { - 'base_url': 'https://www.bluetigers.ca/', - 'search': 'https://www.bluetigers.ca/torrents-search.php', - 'login': 'https://www.bluetigers.ca/account-login.php', - 'download': 'https://www.bluetigers.ca/torrents-details.php?id=%s&hit=1', + 'base_url': self.url, + 'search': urljoin(self.url, 'torrents-search.php'), + 'login': urljoin(self.url, 'account-login.php'), + 'download': urljoin(self.url, 'torrents-details.php?id=%s&hit=1'), } - self.search_params = { - 'c16': 1, 'c10': 1, 'c130': 1, 'c131': 1, 'c17': 1, 'c18': 1, 'c19': 1, 'c9': 1 - } + # Proper Strings - self.url = self.urls['base_url'] + # Miscellaneous Options + + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self, min_time=10) # Only poll BLUETIGERS every 10 minutes max def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): @@ -65,7 +73,6 @@ def login(self): } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: check_login = self.get_url(self.urls['base_url'], returns='text') if re.search('account-logout.php', check_login): @@ -81,74 +88,95 @@ def login(self): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals + """ + BLUETIGERS search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] - if not self.login(): return results + # Search Params + search_params = { + 'c9': 1, + 'c10': 1, + 'c16': 1, + 'c17': 1, + 'c18': 1, + 'c19': 1, + 'c130': 1, + 'c131': 1, + } + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) - self.search_params['search'] = search_string - - data = self.get_url(self.urls['search'], params=self.search_params, returns='text') + search_params['search'] = search_string + data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: continue with BS4Parser(data, 'html5lib') as html: result_linkz = html('a', 
href=re.compile('torrents-details')) + # Continue only if at least one release is found if not result_linkz: - logger.log('Data returned from provider do not contains any torrent', logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue - if result_linkz: - for link in result_linkz: - try: - title = link.text - download_url = self.urls['base_url'] + link['href'] - download_url = download_url.replace('torrents-details', 'download') - # FIXME - size = -1 - seeders = 1 - leechers = 0 - - if not title or not download_url: - continue - - # Filter unseeded torrent - # if seeders < min(self.minseed, 1): - # if mode != 'RSS': - # logger.log('Discarding torrent because it doesn't meet the minimum seeders: {0}. Seeders: {1})'.format - # (title, seeders), logger.DEBUG) - # continue - - item = { - 'title': title, - 'link': download_url, - 'size': size, - 'seeders': seeders, - 'leechers': leechers, - 'pubdate': None, - 'hash': None - } - if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) + for link in result_linkz: + try: + title = link.text + download_url = self.urls['base_url'] + link['href'] + download_url = download_url.replace('torrents-details', 'download') + if not all([title, download_url]): + continue - items.append(item) + # FIXME + seeders = 1 + leechers = 0 - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format + (title, seeders), logger.DEBUG) + continue + + # FIXME + size = -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None, + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) continue - results += items + results += items return results diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py index bd737da185..8e735c3710 100644 --- a/sickbeard/providers/btdigg.py +++ b/sickbeard/providers/btdigg.py @@ -28,7 +28,7 @@ class BTDiggProvider(TorrentProvider): - + """BTDigg Torrent provider""" def __init__(self): # Provider Init @@ -36,18 +36,22 @@ def __init__(self): self.public = True - # Torrent Stats - self.minseed = None - self.minleech = None - # URLs self.url = 'https://btdigg.org' - self.urls = {'api': 'https://api.btdigg.org/api/private-341ada3245790954/s02'} + self.urls = { + 'api': 'https://api.btdigg.org/api/private-341ada3245790954/s02', + } self.custom_url = None # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + # Miscellaneous Options + + # Torrent Stats + self.minseed = None + self.minleech = None + # Use this hacky way for RSS search since most results will use this codecs cache_params = {'RSS': ['x264', 'x264.HDTV', '720.HDTV.x264']} @@ -56,20 +60,24 @@ def __init__(self): def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] - search_params = {'p': 0} + + # Search Params + search_params = { + 'p': 0, + 'order': 2, + } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: search_params['q'] = search_string if mode != 'RSS': search_params['order'] = 0 - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - else: - search_params['order'] = 2 + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) if self.custom_url: # if not validators.url(self.custom_url): # logger.log('Invalid custom url set, please check your settings', logger.WARNING) @@ -80,7 +88,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man jdata = self.get_url(search_url, params=search_params, returns='json') if not jdata: - logger.log('Provider did not return data', logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue for torrent in jdata: @@ -122,7 +130,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index dd46b64007..bdd84a1668 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -36,21 +36,31 @@ class BTNProvider(TorrentProvider): - + """BTN Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'BTN') - self.supports_absolute_numbering = True - + # Credentials self.api_key = None - self.cache = BTNCache(self, min_time=15) # Only poll BTN every 15 minutes max + # URLs + self.url = 'http://broadcasthe.net/' + self.urls = { + 'base_url': 'http://api.btnapps.net', + 'website': self.url, + } + + # Proper Strings + + # Miscellaneous Options + self.supports_absolute_numbering = True - self.urls = {'base_url': 'http://api.btnapps.net', - 'website': 'http://broadcasthe.net/', } + # Torrent Stats - self.url = self.urls['website'] + # Cache + self.cache = BTNCache(self, min_time=15) # Only poll BTN every 15 minutes max def _check_auth(self): if not 
self.api_key: @@ -71,32 +81,37 @@ def _checkAuthFromData(self, parsed_json): return True def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many-locals - + """ + BTN search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ + results = [] self._check_auth() - results = [] - params = {} - apikey = self.api_key + # Search Params + search_params = { + } # age in seconds if age: - params['age'] = '<=' + str(int(age)) + search_params['age'] = '<=' + str(int(age)) if search_strings: - params.update(search_strings) + search_params.update(search_strings) logger.log('Search string: %s' % search_strings, logger.DEBUG) - parsed_json = self._api_call(apikey, params) + parsed_json = self._api_call(self.apikey, search_params) if not parsed_json: logger.log('No data returned from provider', logger.DEBUG) return results if self._checkAuthFromData(parsed_json): - if 'torrents' in parsed_json: - found_torrents = parsed_json['torrents'] - else: - found_torrents = {} + found_torrents = parsed_json.get('torrents', {}) # We got something, we know the API sends max 1000 results at a time. # See if there are more than 1000 results for our query, if not we @@ -112,7 +127,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many # +1 because range(1,4) = 1, 2, 3 for page in range(1, pages_needed + 1): - parsed_json = self._api_call(apikey, params, results_per_page, page * results_per_page) + parsed_json = self._api_call(self.apikey, search_params, results_per_page, page * results_per_page) # Note that this these are individual requests and might time out individually. This would result in 'gaps' # in the results. There is no way to fix this though. 
if 'torrents' in parsed_json: diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py index 3550c42c1f..b56d8b194c 100644 --- a/sickbeard/providers/cpasbien.py +++ b/sickbeard/providers/cpasbien.py @@ -29,22 +29,36 @@ class CpasbienProvider(TorrentProvider): - + """Cpasbien Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'Cpasbien') + # Credentials self.public = True - self.minseed = None - self.minleech = None + + # URLs self.url = 'http://www.cpasbien.cm' + # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + + # Miscellaneous Options + + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache self.cache = tvcache.TVCache(self) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] + # Units + units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po'] + for mode in search_strings: items = [] logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) @@ -52,8 +66,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d' else: search_url = self.url + '/view_cat.php?categorie=series&trie=date-d' @@ -75,16 +89,16 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = try_int(result.find(class_='up').get_text(strip=True)) leechers = try_int(result.find(class_='down').get_text(strip=True)) + + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue torrent_size = result.find(class_='poid').get_text(strip=True) - - units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po'] size = convert_size(torrent_size, units=units) or -1 item = { @@ -94,7 +108,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py index 948e82e077..3c4f834d79 100644 --- a/sickbeard/providers/danishbits.py +++ b/sickbeard/providers/danishbits.py @@ -20,6 +20,7 @@ import traceback +from requests.compat import urljoin from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -46,14 +47,18 @@ def __init__(self): self.freeleech = True # URLs - self.url = 'https://danishbits.org/' + self.url = 'https://danishbits.org' self.urls = { - 'login': self.url + 'login.php', - 'search': self.url + 'torrents.php', + 'login': urljoin(self.url, 'login.php'), + 'search': urljoin(self.url, 'torrents.php'), } # Proper Strings + # Miscellaneous Options + + # Torrent Stats + # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll Danishbits every 10 minutes max @@ -83,6 +88,14 @@ def login(self): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + DanishBits search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -107,16 +120,15 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['search'] = search_string - data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: logger.log('No data returned from provider', logger.DEBUG) @@ -137,6 +149,9 @@ def process_column_header(td): # Skip column headers for result in torrent_rows[1:]: + cells = result('td') + if len(cells) < len(labels): + continue try: title = result.find(class_='croptorrenttext').get_text(strip=True) @@ -144,16 +159,14 @@ def process_column_header(td): if not all([title, download_url]): continue - cells = result('td') - seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True)) leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -171,7 +184,7 @@ def process_column_header(td): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py index 396b87a77f..5d2a6bbef4 100644 --- a/sickbeard/providers/elitetorrent.py +++ b/sickbeard/providers/elitetorrent.py @@ -21,6 +21,8 @@ import re import traceback +from requests.compat import urljoin + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -29,49 +31,59 @@ class elitetorrentProvider(TorrentProvider): - + """EliteTorrent Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'EliteTorrent') - self.onlyspasearch = None - self.minseed = None - self.minleech = None - self.cache = tvcache.TVCache(self) # Only poll EliteTorrent every 20 minutes max + # Credentials + # URLs + self.url = 'http://www.elitetorrent.net' self.urls = { - 'base_url': 'http://www.elitetorrent.net', - 'search': 'http://www.elitetorrent.net/torrents.php' + 'base_url': self.url, + 'search': urljoin(self.url, 'torrents.php') } - self.url = self.urls['base_url'] + # Proper Strings - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches - results = [] - lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang + # Miscellaneous Options + self.onlyspasearch = None + + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self) # Only poll EliteTorrent every 20 minutes max + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ - Search query: - http://www.elitetorrent.net/torrents.php?cat=4&modo=listado&orden=fecha&pag=1&buscar=fringe - - cat = 4 => Shows - modo = listado => display results mode - orden = fecha => order - buscar => Search show - pag = 1 => page number + EliteTorrent search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) """ + results = [] + lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang + # Search query: + # http://www.elitetorrent.net/torrents.php?cat=4&modo=listado&orden=fecha&pag=1&buscar=fringe + # Search Params search_params = { - 'cat': 4, - 'modo': 'listado', - 'orden': 'fecha', - 'pag': 1, - 'buscar': '' + 'cat': 4, # Shows + 'modo': 'listado', # display results mode + 'orden': 'fecha', # date order + 'pag': 1, # page number + 'buscar': '', # Search show } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) # Only search if user conditions are true if self.onlyspasearch and lang_info != 'es' and mode != 'RSS': @@ -79,15 +91,16 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man continue for search_string in search_strings[mode]: + if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_string = re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', search_string) search_params['buscar'] = search_string.strip() if mode != 'RSS' else '' - data = self.get_url(self.urls['search'], 
params=search_params, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -99,27 +112,27 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue + # Skip column headers for row in torrent_rows[1:]: try: - download_url = self.urls['base_url'] + row.find('a')['href'] title = self._process_title(row.find('a', class_='nombre')['title']) - seeders = try_int(row.find('td', class_='semillas').get_text(strip=True)) - leechers = try_int(row.find('td', class_='clientes').get_text(strip=True)) - - # Provider does not provide size - size = -1 - + download_url = self.urls['base_url'] + row.find('a')['href'] if not all([title, download_url]): continue + seeders = try_int(row.find('td', class_='semillas').get_text(strip=True)) + leechers = try_int(row.find('td', class_='clientes').get_text(strip=True)) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue + size = -1 # Provider does not provide size + item = { 'title': title, 'link': download_url, @@ -127,7 +140,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format @@ -156,7 +169,7 @@ def _process_title(title): title = title.replace('(calidad regular)', 'DVDrip x264') title = title.replace('(calidad media)', 'DVDrip x264') - # Language, all results from this provider have spanish audio, we append it to title (avoid to download undesired torrents) + # Language, all results from this provider have spanish audio, we append it to title (to avoid downloading undesired torrents) title += ' SPANISH AUDIO' title += '-ELITETORRENT' diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py index 21e4305884..08b73260f0 100644 --- a/sickbeard/providers/extratorrent.py +++ b/sickbeard/providers/extratorrent.py @@ -21,8 +21,10 @@ import re import traceback -import sickbeard +from requests.compat import urljoin + +import sickbeard from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser from sickbeard.common import USER_AGENT @@ -32,41 +34,67 @@ class ExtraTorrentProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """ExtraTorrent Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'ExtraTorrent') + # Credentials + self.public = True + + # URLs + self.url = 'http://extratorrent.cc' self.urls = { - 'index': 'http://extratorrent.cc', - 'rss': 'http://extratorrent.cc/rss.xml', + 'index': self.url, + 'rss': urljoin(self.url, 'rss.xml'), } + self.custom_url = None - self.url = self.urls['index'] + # Proper Strings - self.public = True + # Miscellaneous Options + self.headers.update({'User-Agent': USER_AGENT}) + self.search_params = {'cid': 8} + + # Torrent Stats self.minseed = None self.minleech = None - self.custom_url = None + # Cache self.cache = tvcache.TVCache(self, min_time=30) # Only poll ExtraTorrent every 30 minutes max - 
self.headers.update({'User-Agent': USER_AGENT}) - self.search_params = {'cid': 8} def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + ExtraTorrent search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] + + # Search Params + search_params = { + 'cid': 8, + 'type': 'rss', + } + for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + search_params['type'] = 'search' + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) - self.search_params.update({'type': ('search', 'rss')[mode == 'RSS'], 'search': search_string}) + search_params['search'] = search_string search_url = self.urls['rss'] if not self.custom_url else self.urls['rss'].replace(self.urls['index'], self.custom_url) - - data = self.get_url(search_url, params=self.search_params, returns='text') + data = self.get_url(search_url, params=search_params, returns='text') if not data: logger.log('No data returned from provider', logger.DEBUG) continue @@ -102,8 +130,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -118,7 +146,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 1bd2390bc7..c09d9b268d 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -22,6 +22,7 @@ import time import traceback +from requests.compat import urljoin from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar from sickbeard import logger, tvcache @@ -32,30 +33,40 @@ class FreshOnTVProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """FreshOnTV Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'FreshOnTV') - self._uid = None - self._hash = None + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - self.freeleech = False + self._uid = None + self._hash = None + self.cookies = None - self.cache = tvcache.TVCache(self) + # URLs + self.url = 'https://freshon.tv' + self.urls = { + 'base_url': self.url, + 'login': urljoin(self.url, 'login.php'), + 'detail': urljoin(self.url, 'details.php?id=%s'), + 'search': urljoin(self.url, 'browse.php?incldead=%s&words=0&cat=0&search=%s'), + 'download': urljoin(self.url, 'download.php?id=%s&type=torrent'), + } - self.urls = {'base_url': 'https://freshon.tv/', - 'login': 'https://freshon.tv/login.php?action=makelogin', - 'detail': 'https://freshon.tv/details.php?id=%s', - 'search': 'https://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s', - 'download': 'https://freshon.tv/download.php?id=%s&type=torrent'} + # Proper Strings - self.url = self.urls['base_url'] + # Miscellaneous Options + self.freeleech = False - self.cookies = None + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self) def _check_auth(self): @@ -68,13 +79,16 @@ def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'action': 'makelogin', + } + if self._uid and self._hash: add_dict_to_cookiejar(self.session.cookies, self.cookies) else: - login_params = {'username': self.username, - 'password': self.password, - 'login': 'submit'} - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: logger.log('Unable to connect to provider', logger.WARNING) @@ -114,12 +128,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = self.urls['search'] % (freeleech, search_string) @@ -222,7 +237,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git 
a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index a4b9594c56..18dd66e75a 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -22,6 +22,7 @@ import re import traceback +from requests.compat import urljoin from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -43,20 +44,22 @@ def __init__(self): self.username = None self.password = None - # Torrent Stats - self.minseed = None - self.minleech = None - # URLs - self.url = 'https://www.thegft.org/' + self.url = 'https://www.thegft.org' self.urls = { - 'login': self.url + 'loginsite.php', - 'search': self.url + 'browse.php', + 'login': urljoin(self.url, 'loginsite.php'), + 'search': urljoin(self.url, 'browse.php'), } # Proper Strings self.proper_strings = ['PROPER', 'REPACK', 'REAL'] + # Miscellaneous Options + + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self) @@ -91,6 +94,14 @@ def login(self): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + GFT search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -121,16 +132,15 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['search'] = search_string - data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: logger.log('No data returned from provider', logger.DEBUG) @@ -140,7 +150,7 @@ def process_column_header(td): torrent_table = html.find('div', id='torrentBrowse') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -167,8 +177,8 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -182,7 +192,7 @@ def process_column_header(td): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index 3a456f0f61..306f924e38 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -21,6 +21,7 @@ import traceback from requests.compat import urljoin + from sickbeard import logger, tvcache from sickrage.helper.common import convert_size, try_int @@ -28,20 +29,32 @@ class HD4FreeProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """HD4Free Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'HD4Free') + # Credentials + self.username = None + self.api_key = None + + # URLs self.url = 'https://hd4free.xyz' - self.urls = {'search': urljoin(self.url, '/searchapi.php')} + self.urls = { + 'search': urljoin(self.url, '/searchapi.php'), + } + + # Proper Strings + # Miscellaneous Options self.freeleech = None - self.username = None - self.api_key = None + + # Torrent Stats self.minseed = None self.minleech = None + # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll HD4Free every 10 minutes max def _check_auth(self): @@ -52,39 +65,48 @@ def _check_auth(self): return False def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + HD4Free search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self._check_auth: return results + # Search Params search_params = { 'tv': 'true', 'username': self.username, - 'apikey': self.api_key + 'apikey': self.api_key, + 'fl': 'true' if self.freeleech else None } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if self.freeleech: - search_params['fl'] = 'true' - else: - search_params.pop('fl', '') if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['search'] = search_string else: - search_params.pop('search', '') + search_params['search'] = None + try: jdata = self.get_url(self.urls['search'], params=search_params, returns='json') except ValueError: logger.log('No data returned from provider', logger.DEBUG) continue + # Continue only if at least one release is found if not jdata: - logger.log('No data returned from provider', logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue error = jdata.get('error') @@ -108,10 +130,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = jdata[i]['seeders'] leechers = jdata[i]['leechers'] + + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -125,7 +149,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index 04562fc481..4c3ed2e61c 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -29,23 +29,33 @@ class HDBitsProvider(TorrentProvider): - + """HDBits Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'HDBits') + # Credentials self.username = None self.passkey = None - self.cache = HDBitsCache(self, min_time=15) # only poll HDBits every 15 minutes max - + # URLs self.url = 'https://hdbits.org' self.urls = { 'search': urljoin(self.url, '/api/torrents'), 'rss': urljoin(self.url, '/api/torrents'), - 'download': urljoin(self.url, '/download.php') + 'download': urljoin(self.url, '/download.php'), } + # Proper Strings + + # Miscellaneous Options + + # Torrent Stats + + # Cache + self.cache = HDBitsCache(self, min_time=15) # only poll HDBits every 15 minutes max + def _check_auth(self): if not self.username or not self.passkey: diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index 805d409bd9..d58f71e753 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -34,23 +34,28 @@ class HDSpaceProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """HDSpace Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'HDSpace') + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - self.cache = tvcache.TVCache(self, min_time=10) # only poll HDSpace every 10 minutes max + # URLs + self.url = 'https://hd-space.org' + self.urls = { + 'base_url': self.url, + 'login': 'https://hd-space.org/index.php', + 'search': 'https://hd-space.org/index.php?page=torrents&search=%s&active=1&options=0', + 'rss': 'https://hd-space.org/rss_torrents.php?feed=dl', + } - self.urls = {'base_url': 'https://hd-space.org/', - 'login': 'https://hd-space.org/index.php?page=login', - 'search': 'https://hd-space.org/index.php?page=torrents&search=%s&active=1&options=0', - 'rss': 'https://hd-space.org/rss_torrents.php?feed=dl'} + # Proper Strings + # Miscellaneous Options self.categories = [15, 21, 22, 24, 25, 40] # HDTV/DOC 1080/720, bluray, remux self.urls['search'] += '&category=' for cat in self.categories: @@ -58,7 +63,12 @@ def __init__(self): self.urls['rss'] += '&cat[]=' + str(cat) self.urls['search'] = self.urls['search'][:-4] # remove extra %%3B - self.url = self.urls['base_url'] + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self, min_time=10) # only poll HDSpace every 10 minutes max def _check_auth(self): @@ -74,8 +84,11 @@ def login(self): if 'pass' in dict_from_cookiejar(self.session.cookies): return True - login_params = {'uid': self.username, - 'pwd': self.password} + login_params = { + 'uid': self.username, + 'pwd': self.password, + 'page': 'login', + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -88,25 +101,32 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, 
too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + HDSpace search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = self.urls['search'] % (quote_plus(search_string.replace('.', ' ')),) else: search_url = self.urls['search'] % '' - if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) - data = self.get_url(search_url, returns='text') if not data or 'please try later' in data: logger.log('No data returned from provider', logger.DEBUG) @@ -150,8 +170,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -165,7 +185,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index ed68281739..904037f2a7 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -33,26 +33,34 @@ class HDTorrentsProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """HDTorrents Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'HDTorrents') + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - self.freeleech = None - + # URLs self.url = 'https://hd-torrents.org/' self.urls = { 'login': urljoin(self.url, 'login.php'), 'search': urljoin(self.url, 'torrents.php'), } + # Proper Strings self.proper_strings = ['PROPER', 'REPACK', 'REAL'] + # Miscellaneous Options + self.freeleech = None + + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache self.cache = tvcache.TVCache(self, min_time=30) def _check_auth(self): @@ -84,7 +92,15 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + HDTorrents search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -99,20 +115,19 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'category[2]': 30, 'category[3]': 38, 'category[4]': 65, + 'active': 5 if self.freeleech else None, } for mode in 
search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': search_params['search'] = search_string - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - - if self.freeleech: - search_params['active'] = 5 + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) response = self.get_url(self.urls['search'], params=search_params, returns='response') if not response or not response.text: @@ -160,8 +175,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -175,7 +190,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py index b4f91c7e62..5ae3c86730 100644 --- a/sickbeard/providers/hounddawgs.py +++ b/sickbeard/providers/hounddawgs.py @@ -21,6 +21,7 @@ import re import traceback +from requests.compat import urljoin from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -31,42 +32,35 @@ class HoundDawgsProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """HoundDawgs Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'HoundDawgs') + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - self.freeleech = None - self.ranked = None + # URLs + self.url = 'https://hounddawgs.org' self.urls = { - 'base_url': 'https://hounddawgs.org/', - 'search': 'https://hounddawgs.org/torrents.php', - 'login': 'https://hounddawgs.org/login.php' + 'base_url': self.url, + 'search': urljoin(self.url, 'torrents.php'), + 'login': urljoin(self.url, 'login.php'), } - self.url = self.urls['base_url'] + # Proper Strings - self.search_params = { - 'filter_cat[85]': 1, - 'filter_cat[58]': 1, - 'filter_cat[57]': 1, - 'filter_cat[74]': 1, - 'filter_cat[92]': 1, - 'filter_cat[93]': 1, - 'order_by': 's3', - 'order_way': 'desc', - 'type': '', - 'userid': '', - 'searchstr': '', - 'searchimdb': '', - 'searchtags': '' - } + # Miscellaneous Options + self.freeleech = None + self.ranked = None + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache self.cache = tvcache.TVCache(self) def login(self): @@ -94,26 +88,49 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals,too-many-branches,too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + HoundDawgs search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results + # Search Params + search_params = { + 'filter_cat[85]': 1, + 'filter_cat[58]': 1, + 
'filter_cat[57]': 1, + 'filter_cat[74]': 1, + 'filter_cat[92]': 1, + 'filter_cat[93]': 1, + 'order_by': 's3', + 'order_way': 'desc', + 'type': '', + 'userid': '', + 'searchstr': '', + 'searchimdb': '', + 'searchtags': '' + } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) - - self.search_params['searchstr'] = search_string + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) - data = self.get_url(self.urls['search'], params=self.search_params, returns='text') + search_params['searchstr'] = search_string + data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: - logger.log('URL did not return data', logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue str_table_start = "
', '', data, 0) with BS4Parser(data, 'html5lib') as html: - if not html: - logger.log('No data returned from provider', logger.DEBUG) - continue - - if html.find(text='No Torrents Found!'): - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue - torrent_table = html.find('table', attrs={'class': 'torrents'}) torrents = torrent_table('tr') if torrent_table else [] - # Continue only if one release is found - if len(torrents) < 2: + # Continue only if at least one release is found + if len(torrents) < 2 or html.find(text='No Torrents Found!'): logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue + # Skip column headers for result in torrents[1:]: try: title = result('td')[1].find('a').text @@ -144,8 +151,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -159,7 +166,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index 118fcf55fd..c9d6d3d704 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -32,22 +32,32 @@ class KatProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """KAT Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'KickAssTorrents') + # Credentials self.public = True + # URLs + self.url = 'https://kat.cr' + self.urls = { + 'search': urljoin(self.url, '%s/'), + } + self.custom_url = None + + # Proper Strings + + # Miscellaneous Options self.confirmed = True + + # Torrent Stats self.minseed = None self.minleech = None - self.url = 'https://kat.cr' - self.urls = {'search': urljoin(self.url, '%s/')} - - self.custom_url = None - + # Cache self.cache = tvcache.TVCache(self, search_params={'RSS': ['tv', 'anime']}) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements @@ -64,7 +74,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: @@ -135,7 +145,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': info_hash + 'hash': info_hash, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py index 8e8e1dd798..1174464701 100644 --- a/sickbeard/providers/limetorrents.py +++ b/sickbeard/providers/limetorrents.py @@ -37,13 +37,15 @@ class LimeTorrentsProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - """Search provider LimeTorrents.""" - + """LimeTorrents Torrent provider""" def __init__(self): # Provider Inits 
TorrentProvider.__init__(self, 'LimeTorrents') + # Credentials + self.public = True + # URLs self.url = 'https://www.limetorrents.cc/' self.urls = { @@ -53,34 +55,32 @@ def __init__(self): 'rss': urljoin(self.url, '/browse-torrents/TV-shows/date/{page}/') } - # Credentials - self.public = True + # Proper Strings + self.proper_strings = ['PROPER', 'REPACK', 'REAL'] + + # Miscellaneous Options self.confirmed = False # Torrent Stats self.minseed = None self.minleech = None - # Proper Strings - self.proper_strings = ['PROPER', 'REPACK', 'REAL'] - # Cache self.cache = tvcache.TVCache(self, min_time=10) def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches,too-many-locals """ - Search the provider for results. - - :param search_strings: Search to perform - :param age: Not used for this provider - :param ep_obj: Not used for this provider + ABNormal search and parsing - :return: A list of items found + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) """ results = [] for mode in search_strings: - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode == 'RSS': @@ -168,7 +168,7 @@ def parse(self, data, mode): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': torrent_hash + 'hash': torrent_hash, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py index bdab9316c4..a6fdf39413 100644 --- a/sickbeard/providers/morethantv.py +++ b/sickbeard/providers/morethantv.py @@ -33,7 +33,7 @@ class MoreThanTVProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """MoreThanTV Torrent provider""" def __init__(self): # Provider Init @@ -45,11 +45,6 @@ def __init__(self): self._uid = None self._hash = None - # Torrent Stats - self.minseed = None - self.minleech = None - self.freeleech = None - # URLs self.url = 'https://www.morethan.tv/' self.urls = { @@ -60,6 +55,13 @@ def __init__(self): # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + # Miscellaneous Options + self.freeleech = None + + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self) @@ -94,6 +96,14 @@ def login(self): return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + MoreThanTV search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -105,7 +115,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'order_way': 'desc', 'action': 'basic', 'searchsubmit': 1, - 'searchstr': '' + 'searchstr': '', } # Units @@ -121,13 +131,13 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['searchstr'] = 
search_string @@ -140,7 +150,7 @@ def process_column_header(td): torrent_table = html.find('table', class_='torrent_table') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -149,6 +159,10 @@ def process_column_header(td): # Skip column headers for result in torrent_rows[1:]: + cells = result('td') + if len(cells) < len(labels): + continue + try: # skip if torrent has been nuked due to poor quality if result.find('img', alt='Nuked'): @@ -159,15 +173,14 @@ def process_column_header(td): if not all([title, download_url]): continue - cells = result('td') seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True)) leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True)) # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - " minimum seeders: {0}. Seeders: {1})".format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -181,7 +194,7 @@ def process_column_header(td): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py index dba63d9ace..b41b37a38e 100644 --- a/sickbeard/providers/newpct.py +++ b/sickbeard/providers/newpct.py @@ -33,45 +33,58 @@ class newpctProvider(TorrentProvider): - + """Newpct Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'Newpct') - self.onlyspasearch = None + # Credentials + # URLs self.url = 'http://www.newpct.com' - self.urls = {'search': urljoin(self.url, 'index.php')} + self.urls = { + 'search': urljoin(self.url, 'index.php'), + } + + # Proper Strings + # Miscellaneous Options + self.onlyspasearch = None + + # Torrent Stats + + # Cache self.cache = tvcache.TVCache(self, min_time=20) - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ - Search query: - http://www.newpct.com/index.php?l=doSearch&q=fringe&category_=All&idioma_=1&bus_de_=All - - q => Show name - category_ = Category 'Shows' (767) - idioma_ = Language Spanish (1) - bus_de_ = Date from (All, hoy) + Newpct search and parsing + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) """ + results = [] # Only search if user conditions are true lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang + # http://www.newpct.com/index.php?l=doSearch&q=fringe&category_=All&idioma_=1&bus_de_=All + # Search Params search_params = { 'l': 'doSearch', - 'q': '', - 'category_': 'All', - 'idioma_': 1, - 'bus_de_': 'All' + 'q': '', # Show name + 'category_': 'All', # Category 'Shows' (767) + 'idioma_': 1, # Language Spanish (1) + 'bus_de_': 'All' # Date from (All, hoy) } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) # Only search if user conditions are 
true if self.onlyspasearch and lang_info != 'es' and mode != 'RSS': @@ -81,41 +94,45 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params['bus_de_'] = 'All' if mode != 'RSS' else 'hoy' for search_string in search_strings[mode]: + if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['q'] = search_string - data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', id='categoryTable') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 3: # Headers + 1 Torrent + Pagination logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # 'Fecha', 'Título', 'Tamaño', '' - # Date, Title, Size + # Date, Title, Size labels = [label.get_text(strip=True) for label in torrent_rows[0]('th')] + + # Skip column headers for row in torrent_rows[1:-1]: - try: - cells = row('td') + cells = row('td') + if len(cells) < len(labels): + continue + try: torrent_row = row.find('a') title = self._processTitle(torrent_row.get('title', '')) download_url = torrent_row.get('href', '') if not all([title, download_url]): continue - # Provider does not provide seeders/leechers - seeders = 1 - leechers = 0 + seeders = 1 # Provider does not provide seeders + leechers = 0 # Provider does not provide leechers torrent_size = cells[labels.index('Tamaño')].get_text(strip=True) size = convert_size(torrent_size) or -1 @@ -126,7 +143,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 23a05559dd..d4c8e2b482 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -278,7 +278,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not self._check_auth(): return results - # For providers that don't have no caps, or for which the t=caps is not working. + # For providers that don't have caps, or for which the t=caps is not working. 
if not self.caps and all(provider not in self.url for provider in ['gingadaddy', 'usenet-crawler']): self.get_newznab_categories(just_caps=True) if not self.caps: @@ -313,12 +313,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params.pop('ep', '') items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) if search_params['t'] != 'tvsearch': search_params['q'] = search_string @@ -379,7 +380,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py index 8bcedcc710..7b7cb9307d 100644 --- a/sickbeard/providers/norbits.py +++ b/sickbeard/providers/norbits.py @@ -20,6 +20,7 @@ import traceback import json +from requests.compat import urljoin from requests.compat import urlencode from sickbeard import logger, tvcache @@ -34,19 +35,32 @@ class NorbitsProvider(TorrentProvider): # pylint: disable=too-many-instance-att def __init__(self): """ Initialize the class """ + + # Provider Init TorrentProvider.__init__(self, 'Norbits') + # Credentials self.username = None self.passkey = None + + # URLs + self.url = 'https://norbits.net' + self.urls = { + 'search': urljoin(self.url, 'api2.php?action=torrents'), + 'download': urljoin(self.url, 'download.php?'), + } + + # Proper Strings + + # Miscellaneous Options + + # Torrent Stats self.minseed = None self.minleech = None + # Cache self.cache = tvcache.TVCache(self, min_time=20) # only poll Norbits every 15 minutes max - self.url = 'https://norbits.net' - self.urls = {'search': self.url + '/api2.php?action=torrents', - 'download': self.url + '/download.php?'} - def _check_auth(self): if not self.username or not self.passkey: @@ -72,12 +86,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': - logger.log('Search string: {0}'.format - (search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) post_data = { 'username': self.username, @@ -100,6 +115,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Resulting JSON from provider is not correct, ' 'not parsing it', logger.ERROR) + # Skip column headers for item in json_items.get('torrents', []): try: title = item.pop('name', '') @@ -113,10 +129,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = try_int(item.pop('seeders', 0)) leechers = try_int(item.pop('leechers', 0)) + # Filter unseeded torrent if seeders < min(self.minseed, 1): - logger.log('Discarding torrent because it does not meet ' - 'the minimum seeders: {0}. Seeders: {1})'.format - (title, seeders), logger.DEBUG) + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format + (title, seeders), logger.DEBUG) continue info_hash = item.pop('info_hash', '') @@ -129,7 +147,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': info_hash + 'hash': info_hash, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index f7b8b82725..fe66a4e041 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -31,47 +31,65 @@ class NyaaProvider(TorrentProvider): # pylint: disable=too-many-instance-attrib def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'NyaaTorrents') + # Credentials self.public = True - self.supports_absolute_numbering = True - self.anime_only = True + # URLs self.url = 'http://www.nyaa.se' - self.minseed = 0 - self.minleech = 0 - self.confirmed = False + # Miscellaneous Options + self.supports_absolute_numbering = True + self.anime_only = True + self.confirmed = False self.regex = re.compile(r'(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)', re.DOTALL) + # Torrent Stats + self.minseed = 0 + self.minleech = 0 + + # Cache self.cache = tvcache.TVCache(self, min_time=20) # only poll NyaaTorrents every 20 minutes max def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + NyaaTorrents search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if self.show and not self.show.is_anime: return results + # Search Params search_params = { 'page': 'rss', 'cats': '1_0', # All anime 'sort': 2, # Sort Descending By Seeders - 'order': 1 + 'order': 1, } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['term'] = search_string - data = self.cache.getRSSFeed(self.url, params=search_params)['entries'] if not data: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue for curItem in data: try: @@ -90,10 +108,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = try_int(seeders) leechers = try_int(leechers) + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -111,7 +130,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format From 5370ba1460429cf76dceed43d604492519d7ce63 Mon Sep 17 00:00:00 2001 From: labrys Date: Fri, 17 Jun 2016 07:38:30 -0400 Subject: [PATCH 064/134] More standardization --- sickbeard/providers/tntvillage.py | 57 +++++++++++++++++++------------ 1 file changed, 36 insertions(+), 21 deletions(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 96578afe81..3c4efb1fc9 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -36,32 +36,38 @@ class TNTVillageProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """TNTVillage Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'TNTVillage') - self._uid = None - self._hash = None - + # Credentials self.username = None self.password = None + self._uid = None + self._hash = None - self.minseed = None - self.minleech = None - + # URLs self.url = 'http://forum.tntvillage.scambioetico.org/' self.urls = { 'login': urljoin(self.url, 'index.php?act=Login&CODE=01'), 'download': urljoin(self.url, 'index.php?act=Attach&type=post&id={0}'), } + # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] - self.cache = tvcache.TVCache(self, min_time=30) # only poll TNTVillage every 30 minutes max - + # Miscellaneous Options self.subtitle = None + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self, min_time=30) # only poll TNTVillage every 30 minutes max + def _check_auth(self): if not self.username or not self.password: @@ -80,7 +86,7 @@ def login(self): 'UserName': self.username, 'PassWord': self.password, 'CookieDate': 1, - 'submit': 'Connettiti al Forum' + 'submit': 'Connettiti al Forum', } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') @@ -95,7 +101,15 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + TNTVillage search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -103,19 +117,19 @@ def search(self, search_strings, age=0, ep_obj=None): search_params = { 'act': 'allreleases', 'filter': '', + 'cat': 29, } for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if mode != 'RSS': - search_params['filter'] = search_string - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) - else: - search_params['cat'] = 29 + if mode != 'RSS': + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) + search_params['filter'] = search_string response = self.get_url(self.url, params=search_params, returns='response') if not response or not response.text: @@ -126,11 +140,12 @@ def search(self, search_strings, age=0, ep_obj=None): torrent_table = html.find('table', 
class_='copyright') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if one release is found + # Continue only if at least one release is found if len(torrent_rows) < 3: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue + # Skip column headers for result in torrent_table('tr')[1:]: try: cells = result('td') @@ -156,8 +171,8 @@ def search(self, search_strings, age=0, ep_obj=None): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -176,7 +191,7 @@ def search(self, search_strings, age=0, ep_obj=None): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format From a473e5964307cf37eb9c07cd2df421d30745dcf0 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 17 Jun 2016 14:19:22 +0200 Subject: [PATCH 065/134] Bring back eng releases only option --- sickbeard/providers/tntvillage.py | 141 ++++++++++++++++-------------- 1 file changed, 74 insertions(+), 67 deletions(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 3c4efb1fc9..a3569a1399 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -59,6 +59,7 @@ def __init__(self): self.proper_strings = ['PROPER', 'REPACK'] # Miscellaneous Options + self.engrelease = None self.subtitle = None # Torrent Stats @@ -117,7 +118,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params = { 'act': 'allreleases', 'filter': '', - 'cat': 29, } for mode in search_strings: @@ -126,86 +126,93 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: + if self.engrelease: + search_params['filter'] = 'eng' + search_string += ' eng' + if mode != 'RSS': logger.log('Search string: {search}'.format (search=search_string), logger.DEBUG) search_params['filter'] = search_string + else: + search_params['cat'] = 29 - response = self.get_url(self.url, params=search_params, returns='response') - if not response or not response.text: - logger.log('No data returned from provider', logger.DEBUG) - continue + response = self.get_url(self.url, params=search_params, returns='response') + if not response or not response.text: + logger.log('No data returned from provider', logger.DEBUG) + continue - with BS4Parser(response.text, 'html5lib') as html: - torrent_table = html.find('table', class_='copyright') - torrent_rows = torrent_table('tr') if torrent_table else [] + with BS4Parser(response.text, 'html5lib') as html: + torrent_table = html.find('table', class_='copyright') + torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one release is found - if len(torrent_rows) < 3: - logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) - continue + # Continue only if at least one release is found + if len(torrent_rows) < 3: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue # Skip column headers - for result in torrent_table('tr')[1:]: - try: - cells = result('td') - if not cells: - continue - - last_cell_anchor = 
cells[-1].find('a') - if not last_cell_anchor: - continue - params = parse_qs(last_cell_anchor.get('href', '')) - download_url = self.urls['download'].format(params['pid'][0]) if \ - params.get('pid') else None - title = _normalize_title(cells[0], cells[1], mode) - if not all([title, download_url]): - continue - - info_cell = cells[3].find_all('td') - leechers = info_cell[0].find('span').get_text(strip=True) - leechers = try_int(leechers) - seeders = info_cell[1].find('span').get_text() - seeders = try_int(seeders, 1) - - # Filter unseeded torrent - if seeders < min(self.minseed, 1): - if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the " - "minimum seeders: {0}. Seeders: {1}".format - (title, seeders), logger.DEBUG) - continue - - if _has_only_subs(title) and not self.subtitle: - logger.log('Torrent is only subtitled, skipping: {0}'.format - (title), logger.DEBUG) - continue - - torrent_size = info_cell[3].find('span').get_text() + ' GB' - size = convert_size(torrent_size) or -1 - - item = { - 'title': title, - 'link': download_url, - 'size': size, - 'seeders': seeders, - 'leechers': leechers, - 'pubdate': None, - 'hash': None, - } + for result in torrent_table('tr')[1:]: + try: + cells = result('td') + if not cells: + continue + + last_cell_anchor = cells[-1].find('a') + if not last_cell_anchor: + continue + params = parse_qs(last_cell_anchor.get('href', '')) + download_url = self.urls['download'].format(params['pid'][0]) if \ + params.get('pid') else None + title = _normalize_title(cells[0], cells[1], mode) + if not all([title, download_url]): + continue + + info_cell = cells[3].find_all('td') + leechers = info_cell[0].find('span').get_text(strip=True) + leechers = try_int(leechers) + seeders = info_cell[1].find('span').get_text() + seeders = try_int(seeders, 1) + + # Filter unseeded torrent + if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format + (title, seeders), logger.DEBUG) + continue - items.append(item) - except (AttributeError, TypeError, KeyError, ValueError, IndexError): - logger.log('Failed parsing provider. Traceback: {0!r}'.format - (traceback.format_exc()), logger.ERROR) + if _has_only_subs(title) and not self.subtitle: + logger.log('Torrent is only subtitled, skipping: {0}'.format + (title), logger.DEBUG) continue - results += items + torrent_size = info_cell[3].find('span').get_text() + ' GB' + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None, + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. 
Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue - return results + items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) + results += items + + return results def _normalize_title(title, info, mode): From 817faf04e068ff86333363fc025848977a6f1792 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 17 Jun 2016 14:24:08 +0200 Subject: [PATCH 066/134] small fixup --- sickbeard/providers/tntvillage.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index a3569a1399..cff75925cb 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -209,7 +209,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (traceback.format_exc()), logger.ERROR) continue - items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True) results += items return results From a1d13d35000f56a5fbd0bea751535f33ca8f6091 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 17 Jun 2016 14:57:42 +0200 Subject: [PATCH 067/134] Small tnt change --- sickbeard/providers/tntvillage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index cff75925cb..1febbbe3ea 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -118,6 +118,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params = { 'act': 'allreleases', 'filter': '', + 'cat': 29, } for mode in search_strings: @@ -134,8 +135,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('Search string: {search}'.format (search=search_string), logger.DEBUG) search_params['filter'] = search_string - else: - search_params['cat'] = 29 + search_params['cat'] = None response = self.get_url(self.url, params=search_params, returns='response') if not response or not response.text: From fc90ee39f5d8db578a0f5f2214b93f48ee6cf164 Mon Sep 17 00:00:00 2001 From: medariox Date: Fri, 17 Jun 2016 16:02:28 +0200 Subject: [PATCH 068/134] Update daily search url --- sickbeard/providers/thepiratebay.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index af7a316b9a..cba63effdb 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -49,7 +49,7 @@ def __init__(self): # URLs self.url = 'https://thepiratebay.se' self.urls = { - 'rss': urljoin(self.url, 'browse/200'), + 'rss': urljoin(self.url, 'tv/latest'), 'search': urljoin(self.url, 's/'), # Needs trailing / } self.custom_url = None From b7619a96325b1000d203ce84563efb798a785425 Mon Sep 17 00:00:00 2001 From: Dario Date: Sat, 18 Jun 2016 11:50:22 +0200 Subject: [PATCH 069/134] Remove freeleech option for MTV --- sickbeard/providers/morethantv.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py index a6fdf39413..b4a08ba474 100644 --- a/sickbeard/providers/morethantv.py +++ b/sickbeard/providers/morethantv.py @@ -56,7 +56,6 @@ def __init__(self): self.proper_strings = ['PROPER', 'REPACK'] # Miscellaneous Options - self.freeleech = None # Torrent Stats self.minseed = None From 0691ac04ed3f288978508a71e37be0b7d4011a00 Mon Sep 17 00:00:00 2001 From: Dario Date: Sat, 18 Jun 2016 11:55:10 +0200 Subject: [PATCH 070/134] Remove TypeError from connection time out --- sickbeard/search.py | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/search.py b/sickbeard/search.py index a5e9c1f105..1211b8d62f 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -548,7 +548,7 @@ def searchProviders(show, episodes, forced_search=False, downCurQuality=False, m except AuthException as e: logger.log(u"Authentication error: " + ex(e), logger.ERROR) break - except (SocketTimeout, TypeError) as e: + except SocketTimeout as e: logger.log(u"Connection timed out (sockets) while searching %s. Error: %r" % (cur_provider.name, ex(e)), logger.DEBUG) break From 81a9d1b863d60b72a39faaa1a857b0d23c9a530f Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 18 Jun 2016 09:14:05 -0400 Subject: [PATCH 071/134] FIx repeated keyword in dict --- sickbeard/providers/hdtorrents.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index 904037f2a7..d7d4070628 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -108,14 +108,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Search Params search_params = { 'search': '', - 'active': 1, + 'active': 5 if self.freeleech else 1, 'options': 0, 'category[0]': 59, 'category[1]': 60, 'category[2]': 30, 'category[3]': 38, 'category[4]': 65, - 'active': 5 if self.freeleech else None, } for mode in search_strings: From e431f9cc4f25ea731e2a4ece0fb06ddb4d88767f Mon Sep 17 00:00:00 2001 From: Labrys Date: Fri, 17 Jun 2016 23:12:47 -0400 Subject: [PATCH 072/134] More standardization --- sickbeard/providers/nyaatorrents.py | 1 - sickbeard/providers/omgwtfnzbs.py | 22 +++++--- sickbeard/providers/pretome.py | 76 +++++++++++++++++---------- sickbeard/providers/rarbg.py | 43 ++++++++++----- sickbeard/providers/scc.py | 28 ++++++---- sickbeard/providers/scenetime.py | 71 ++++++++++++++++--------- sickbeard/providers/shazbat.py | 21 +++++--- sickbeard/providers/speedcd.py | 34 ++++++------ sickbeard/providers/t411.py | 59 ++++++++++++++------- sickbeard/providers/thepiratebay.py | 24 +++++---- sickbeard/providers/tntvillage.py | 8 +-- sickbeard/providers/tokyotoshokan.py | 48 ++++++++++++----- sickbeard/providers/torrentbytes.py | 41 +++++++++------ sickbeard/providers/torrentday.py | 27 +++++----- sickbeard/providers/torrentleech.py | 32 +++++++---- sickbeard/providers/torrentproject.py | 23 ++++---- sickbeard/providers/torrentz.py | 27 +++++----- sickbeard/providers/transmitthenet.py | 32 +++++++---- sickbeard/providers/tvchaosuk.py | 52 ++++++++++++------ sickbeard/providers/womble.py | 21 ++++++-- sickbeard/providers/xthor.py | 58 ++++++++++---------- sickbeard/providers/zooqle.py | 21 ++++---- 22 files changed, 488 insertions(+), 281 deletions(-) diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index fe66a4e041..e4c2c650e3 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -40,7 +40,6 @@ def __init__(self): # URLs self.url = 'http://www.nyaa.se' - # Miscellaneous Options self.supports_absolute_numbering = True self.anime_only = True diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index 315efd8bb5..ee34ace9c9 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -20,8 +20,8 @@ import re import traceback -import sickbeard +import sickbeard from sickbeard import logger, tvcache from sickrage.helper.common import convert_size, try_int @@ -31,21 
+31,29 @@ class OmgwtfnzbsProvider(NZBProvider): def __init__(self): + + # Provider Init NZBProvider.__init__(self, 'OMGWTFNZBs') + # Credentials self.username = None self.api_key = None - self.cache = OmgwtfnzbsCache(self) - + # URLs self.url = 'https://omgwtfnzbs.org/' self.urls = { 'rss': 'https://rss.omgwtfnzbs.org/rss-download.php', - 'api': 'https://api.omgwtfnzbs.org/json/' + 'api': 'https://api.omgwtfnzbs.org/json/', } + # Proper Strings self.proper_strings = ['.PROPER.', '.REPACK.'] + # Miscellaneous Options + + # Cache + self.cache = OmgwtfnzbsCache(self) + def _check_auth(self): if not self.username or not self.api_key: @@ -105,13 +113,13 @@ def search(self, search_strings, age=0, ep_obj=None): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: search_params['search'] = search_string if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) data = self.get_url(self.urls['api'], params=search_params, returns='json') if not data: diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index 6aa5da0664..59af9edf38 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -21,6 +21,7 @@ import re import traceback +from requests.compat import urljoin from requests.compat import quote from requests.utils import dict_from_cookiejar @@ -32,29 +33,38 @@ class PretomeProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """Pretome Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'Pretome') + # Credentials self.username = None self.password = None self.pin = None - self.minseed = None - self.minleech = None - self.urls = {'base_url': 'https://pretome.info', - 'login': 'https://pretome.info/takelogin.php', - 'detail': 'https://pretome.info/details.php?id=%s', - 'search': 'https://pretome.info/browse.php?search=%s%s', - 'download': 'https://pretome.info/download.php/%s/%s.torrent'} - - self.url = self.urls['base_url'] + # URLs + self.url = 'https://pretome.info' + self.urls = { + 'base_url': self.url, + 'login': urljoin(self.url, 'takelogin.php'), + 'search': urljoin(self.url, 'browse.php?search=%s%s'), + 'download': urljoin(self.url, 'download.php/%s/%s.torrent'), + 'detail': urljoin(self.url, 'details.php?id=%s'), + } + + # Proper Strings + self.proper_strings = ['PROPER', 'REPACK'] + # Miscellaneous Options self.categories = '&st=1&cat%5B%5D=7' - self.proper_strings = ['PROPER', 'REPACK'] + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self) def _check_auth(self): @@ -68,9 +78,11 @@ def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True - login_params = {'username': self.username, - 'password': self.password, - 'login_pin': self.pin} + login_params = { + 'username': self.username, + 'password': self.password, + 'login_pin': self.pin, + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -83,25 +95,33 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-statements, too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + 
Pretome search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = self.urls['search'] % (quote(search_string), self.categories) - data = self.get_url(search_url, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -119,8 +139,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_rows = torrent_table('tr', attrs={'class': 'browse'}) for result in torrent_rows: + cells = result('td') try: - cells = result('td') size = None link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'}) @@ -138,19 +158,19 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = int(cells[9].contents[0]) leechers = int(cells[10].contents[0]) - # Need size for failed downloads handling - if size is None: - torrent_size = cells[7].text - size = convert_size(torrent_size) or -1 - # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue + # Need size for failed downloads handling + if size is None: + torrent_size = cells[7].text + size = convert_size(torrent_size) or -1 + item = { 'title': title, 'link': download_url, @@ -158,7 +178,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index 4b29b63eab..fa376ba62c 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -33,22 +33,32 @@ class RarbgProvider(TorrentProvider): # pylint: disable=too-many-instance-attri def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'Rarbg') + # Credentials self.public = True - self.minseed = None - self.ranked = None - self.sorting = None - self.minleech = None self.token = None self.token_expires = None - # Spec: https://torrentapi.org/apidocs_v2.txt - self.url = 'https://rarbg.com' - self.urls = {'api': 'http://torrentapi.org/pubapi_v2.php'} + # URLs + self.url = 'https://rarbg.com' # Spec: https://torrentapi.org/apidocs_v2.txt + self.urls = { + 'api': 'http://torrentapi.org/pubapi_v2.php', + } + # Proper Strings self.proper_strings = ['{{PROPER|REPACK}}'] + # Miscellaneous Options + self.ranked = None + self.sorting = None + + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache self.cache = tvcache.TVCache(self, min_time=10) # only poll RARBG every 10 minutes max def login(self): @@ -58,7 +68,7 @@ def login(self): login_params = { 'get_token': 'get_token', 'format': 'json', - 'app_id': 'sickrage2' + 'app_id': 'sickrage2', } response = self.get_url(self.urls['api'], params=login_params, returns='json') @@ -71,10 +81,19 @@ def login(self): return self.token is not None def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements + """ + RARBG search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results + # Search Params search_params = { 'app_id': 'sickrage2', 'category': 'tv', @@ -95,7 +114,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) if mode == 'RSS': search_params['sort'] = 'last' @@ -112,6 +131,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params.pop('search_tvdb', None) for search_string in search_strings[mode]: + if mode != 'RSS': search_params['search_string'] = search_string logger.log('Search string: {0}'.format(search_string), @@ -159,6 +179,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man seeders = item.pop('seeders') leechers = item.pop('leechers') + # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" @@ -169,10 +190,6 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_size = item.pop('size', -1) size = convert_size(torrent_size) or -1 - if mode != 'RSS': - 
logger.log('Found result: {0} with {1} seeders and {2} leechers'.format - (title, seeders, leechers), logger.DEBUG) - item = { 'title': title, 'link': download_url, diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 588797f112..4782570a5f 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -36,15 +36,15 @@ class SCCProvider(TorrentProvider): # pylint: disable=too-many-instance-attribu def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'SceneAccess') + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - - self.cache = tvcache.TVCache(self) # only poll SCC every 20 minutes max + # URLs + self.url = self.urls['base_url'] self.urls = { 'base_url': 'https://sceneaccess.eu', 'login': 'https://sceneaccess.eu/login', @@ -53,14 +53,22 @@ def __init__(self): 'download': 'https://www.sceneaccess.eu/%s' } - self.url = self.urls['base_url'] + # Proper Strings + # Miscellaneous Options self.categories = { 'Season': 'c26=26&c44=44&c45=45', # Archive, non-scene HD, non-scene SD; need to include non-scene because WEB-DL packs get added to those categories 'Episode': 'c17=17&c27=27&c33=33&c34=34&c44=44&c45=45', # TV HD, TV SD, non-scene HD, non-scene SD, foreign XviD, foreign x264 'RSS': 'c17=17&c26=26&c27=27&c33=33&c34=34&c44=44&c45=45' # Season + Episode } + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self) # only poll SCC every 20 minutes max + def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True @@ -68,7 +76,7 @@ def login(self): login_params = { 'username': self.username, 'password': self.password, - 'submit': 'come on in' + 'submit': 'come on in', } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') @@ -95,9 +103,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: + if mode != 'RSS': logger.log('Search string: {0}'.format(search_string), logger.DEBUG) @@ -105,6 +114,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(search_url, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -138,7 +148,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -152,7 +162,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 35f99c3cfa..458408e5dd 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -21,7 +21,7 @@ import re import traceback -from requests.compat import quote +from requests.compat import urljoin, quote from requests.utils import dict_from_cookiejar from sickbeard import logger, tvcache @@ -32,34 +32,46 @@ class SceneTimeProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """SceneTime Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'SceneTime') + # Credentials self.username = None self.password = None - self.minseed = None - self.minleech = None - - self.cache = tvcache.TVCache(self) # only poll SceneTime every 20 minutes max - self.urls = {'base_url': 'https://www.scenetime.com', - 'login': 'https://www.scenetime.com/takelogin.php', - 'detail': 'https://www.scenetime.com/details.php?id=%s', - 'search': 'https://www.scenetime.com/browse.php?search=%s%s', - 'download': 'https://www.scenetime.com/download.php/%s/%s'} + # URLs + self.url = 'https://www.scenetime.com' + self.urls = { + 'base_url': self.url, + 'login': urljoin(self.url, 'takelogin.php'), + 'detail': urljoin(self.url, 'details.php?id=%s'), + 'search': urljoin(self.url, 'browse.php?search=%s%s'), + 'download': urljoin(self.url, 'download.php/%s/%s'), + } - self.url = self.urls['base_url'] + # Proper Strings + # Miscellaneous Options self.categories = '&c2=1&c43=13&c9=1&c63=1&c77=1&c79=1&c100=1&c101=1' + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self) # only poll SceneTime every 20 minutes max + def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True - login_params = {'username': self.username, - 'password': self.password} + login_params = { + 'username': self.username, + 'password': self.password, + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -72,25 +84,34 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + SceneTime search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_url = self.urls['search'] % (quote(search_string), self.categories) data = self.get_url(search_url, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 
'html5lib') as html: @@ -99,7 +120,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if torrent_table: torrent_rows = torrent_table.select('tr') - # Continue only if one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -109,13 +130,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # and using their index to find the correct download/seeders/leechers td. labels = [label.get_text(strip=True) for label in torrent_rows[0]('td')] + # Skip column headers for result in torrent_rows[1:]: - try: - cells = result('td') + cells = result('td') + if len(cells) < len(labels): + continue + try: link = cells[labels.index('Name')].find('a') torrent_id = link['href'].replace('details.php?id=', '').split('&')[0] - title = link.get_text(strip=True) download_url = self.urls['download'] % (torrent_id, '%s.torrent' % title.replace(' ', '.')) if not all([title, download_url]): @@ -128,7 +151,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -142,7 +165,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py index 1b12c9da3d..5303da6b19 100644 --- a/sickbeard/providers/shazbat.py +++ b/sickbeard/providers/shazbat.py @@ -27,18 +27,16 @@ class ShazbatProvider(TorrentProvider): - + """Shazbat Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'Shazbat.tv') - self.supports_backlog = False - + # Credentials self.passkey = None - self.options = None - - self.cache = ShazbatCache(self, min_time=20) + # URLs self.url = 'http://www.shazbat.tv' self.urls = { 'login': urljoin(self.url, 'login'), @@ -47,6 +45,17 @@ def __init__(self): # 'rss_followed': urljoin(self.url, 'rss/followed') } + # Proper Strings + + # Miscellaneous Options + self.supports_backlog = False + self.options = None + + # Torrent Stats + + # Cache + self.cache = ShazbatCache(self, min_time=20) + def _check_auth(self): if not self.passkey: raise AuthException('Your authentication credentials are missing, check your config.') diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 9539696276..19ca6efe65 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -32,7 +32,7 @@ class SpeedCDProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """SpeedCD Torrent provider""" def __init__(self): # Provider Init @@ -42,11 +42,6 @@ def __init__(self): self.username = None self.password = None - # Torrent Stats - self.minseed = None - self.minleech = None - self.freeleech = False - # URLs self.url = 'https://speed.cd' self.urls = { @@ -57,6 +52,13 @@ def __init__(self): # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + # Miscellaneous Options + self.freeleech = False + + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = 
tvcache.TVCache(self) @@ -116,18 +118,18 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['search'] = search_string - data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: @@ -135,7 +137,7 @@ def process_column_header(td): torrent_table = torrent_table.find('table') if torrent_table else None torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -144,9 +146,11 @@ def process_column_header(td): # Skip column headers for result in torrent_rows[1:]: - try: - cells = result('td') + cells = result('td') + if len(cells) < len(labels): + continue + try: title = cells[labels.index('Title')].find('a', class_='torrent').get_text() download_url = urljoin(self.url, cells[labels.index('Download')].find(title='Download').parent['href']) if not all([title, download_url]): @@ -159,7 +163,7 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -174,7 +178,7 @@ def process_column_header(td): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 767d57c107..3a759893f7 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -21,6 +21,7 @@ import time import traceback +from requests.compat import urljoin from requests.auth import AuthBase from sickbeard import logger, tvcache @@ -32,42 +33,51 @@ class T411Provider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """T411 Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, "T411") + # Credentials self.username = None self.password = None self.token = None self.tokenLastUpdate = None - self.cache = tvcache.TVCache(self, min_time=10) # Only poll T411 every 10 minutes max - - self.urls = {'base_url': 'http://www.t411.ch/', - 'search': 'https://api.t411.ch/torrents/search/%s*?cid=%s&limit=100', - 'rss': 'https://api.t411.ch/torrents/top/today', - 'login_page': 'https://api.t411.ch/auth', - 'download': 'https://api.t411.ch/torrents/download/%s'} + # URLs + self.url = 'https://api.t411.ch' + self.urls = { + 'base_url': 'http://www.t411.ch/', + 'search': urljoin(self.url, 'torrents/search/%s*?cid=%s&limit=100'), + 'rss': urljoin(self.url, 'torrents/top/today'), + 'login_page': urljoin(self.url, 'auth'), + 'download': urljoin(self.url, 'torrents/download/%s'), + } - self.url = self.urls['base_url'] + # Proper Strings + # Miscellaneous Options self.headers.update({'User-Agent': USER_AGENT}) - self.subcategories = [433, 
637, 455, 639] + self.confirmed = False + # Torrent Stats self.minseed = 0 self.minleech = 0 - self.confirmed = False - def login(self): + # Cache + self.cache = tvcache.TVCache(self, min_time=10) # Only poll T411 every 10 minutes max + def login(self): if self.token is not None: if time.time() < (self.tokenLastUpdate + 30 * 60): return True - login_params = {'username': self.username, - 'password': self.password} + login_params = { + 'username': self.username, + 'password': self.password, + } response = self.get_url(self.urls['login_page'], post_data=login_params, returns='json') if not response: @@ -84,25 +94,34 @@ def login(self): logger.log('Token not found in authentication response', logger.WARNING) return False - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + T411 search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_urls = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS'] for search_url in search_urls: data = self.get_url(search_url, returns='json') if not data: + logger.log('No data returned from provider', logger.DEBUG) continue if 'torrents' not in data and mode != 'RSS': @@ -134,7 +153,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -153,7 +172,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index cba63effdb..eefdce7eef 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -32,7 +32,7 @@ class ThePirateBayProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """ThePirateBay Torrent provider""" def __init__(self): # Provider Init @@ -41,11 +41,6 @@ def __init__(self): # Credentials self.public = True - # Torrent Stats - self.minseed = None - self.minleech = None - self.confirmed = True - # URLs self.url = 'https://thepiratebay.se' self.urls = { @@ -56,6 +51,13 @@ def __init__(self): # Proper Strings + # Miscellaneous Options + self.confirmed = True + + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self, min_time=1) # only poll ThePirateBay every 30 minutes max @@ -86,7 +88,7 @@ def process_column_header(th): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: @@ -107,14 +109,14 @@ def process_column_header(th): data = self.get_url(search_url, returns='text') if not data: - logger.log('URL did not return data, maybe try a custom url, or a different one', logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', id='searchResult') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -143,7 +145,7 @@ def process_column_header(th): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -166,7 +168,7 @@ def process_column_header(th): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 1febbbe3ea..3d3a8d32ac 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -117,7 +117,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_params = { 'act': 'allreleases', - 'filter': '', + 'filter': 'eng ' if self.engrelease else '', 'cat': 29, } @@ -127,14 +127,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: - if self.engrelease: - search_params['filter'] = 'eng' - search_string += ' eng' - if mode != 'RSS': logger.log('Search string: {search}'.format (search=search_string), logger.DEBUG) - search_params['filter'] = search_string + search_params['filter'] += search_string search_params['cat'] = None response = self.get_url(self.url, params=search_params, returns='response') diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index c8127ef2c2..6e71464498 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -21,6 +21,8 @@ import re import traceback +from requests.compat import urljoin + from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -29,39 +31,57 @@ class TokyoToshokanProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """TokyoToshokan Torrent provider""" def __init__(self): + # Provider Init TorrentProvider.__init__(self, 'TokyoToshokan') + # Credentials self.public = True + + # URLs + self.url = 'http://tokyotosho.info/' + self.urls = { + 'search': urljoin(self.url, 'search.php'), + 'rss': urljoin(self.url, 'rss.php'), + } + + # Proper Strings + + # Miscellaneous Options self.supports_absolute_numbering = True self.anime_only = True + # Torrent Stats self.minseed = None self.minleech = None - self.url = 'http://tokyotosho.info/' - self.urls = { - 'search': self.url + 'search.php', - 'rss': self.url + 'rss.php' - } + # Cache self.cache = tvcache.TVCache(self, min_time=15) # only poll TokyoToshokan every 15 minutes max - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + TokyoToshokan search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if self.show and not self.show.is_anime: return results for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params = { 'terms': search_string, @@ -70,19 +90,21 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: + 
logger.log('No data returned from provider', logger.DEBUG) continue with BS4Parser(data, 'html5lib') as soup: torrent_table = soup.find('table', class_='listing') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue a = 1 if len(torrent_rows[0]('td')) < 2 else 0 + # Skip column headers for top, bot in zip(torrent_rows[a::2], torrent_rows[a + 1::2]): try: desc_top = top.find('td', class_='desc-top') @@ -100,7 +122,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -114,7 +136,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index 7bd09655e8..a5fc036270 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -32,7 +32,7 @@ class TorrentBytesProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """TorrentBytes Torrent provider""" def __init__(self): # Provider Init @@ -42,11 +42,6 @@ def __init__(self): self.username = None self.password = None - # Torrent Stats - self.minseed = None - self.minleech = None - self.freeleech = False - # URLs self.url = 'https://www.torrentbytes.net' self.urls = { @@ -57,6 +52,13 @@ def __init__(self): # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + # Miscellaneous Options + self.freeleech = False + + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self) @@ -64,9 +66,11 @@ def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True - login_params = {'username': self.username, - 'password': self.password, - 'login': 'Log in!'} + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'Log in!', + } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') if not response: @@ -84,6 +88,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not self.login(): return results + # Search Params search_params = { 'c41': 1, 'c33': 1, @@ -94,12 +99,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['search'] = search_string data = self.get_url(self.urls['search'], params=search_params, returns='text') @@ -111,7 +117,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man torrent_table = html.find('table', border='1') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue 
only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -119,10 +125,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # "Type", "Name", Files", "Comm.", "Added", "TTL", "Size", "Snatched", "Seeders", "Leechers" labels = [label.get_text(strip=True) for label in torrent_rows[0]('td')] + # Skip column headers for result in torrent_rows[1:]: - try: - cells = result('td') + cells = result('td') + if len(cells) < len(labels): + continue + try: download_url = urljoin(self.url, cells[labels.index('Name')].find('a', href=re.compile(r'download.php\?id='))['href']) title_element = cells[labels.index('Name')].find('a', href=re.compile(r'details.php\?id=')) title = title_element.get('title', '') or title_element.get_text(strip=True) @@ -142,7 +151,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -156,7 +165,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index d72742fa56..8aa01ff532 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -32,7 +32,7 @@ class TorrentDayProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """TorrentDay Torrent provider""" def __init__(self): # Provider Init @@ -44,11 +44,6 @@ def __init__(self): self._uid = None self._hash = None - # Torrent Stats - self.minseed = None - self.minleech = None - self.freeleech = False - # URLs self.url = 'https://classic.torrentday.com' self.urls = { @@ -57,10 +52,18 @@ def __init__(self): 'download': urljoin(self.url, '/download.php/') } + # Proper Strings + + # Miscellaneous Options + self.freeleech = False self.cookies = None self.categories = {'Season': {'c14': 1}, 'Episode': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1}, 'RSS': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1, 'c14': 1}} + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll IPTorrents every 10 minutes max @@ -76,7 +79,7 @@ def login(self): 'username': self.username, 'password': self.password, 'submit.x': 0, - 'submit.y': 0 + 'submit.y': 0, } response = self.get_url(self.urls['login'], post_data=login_params, returns='text') @@ -108,13 +111,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_string = '+'.join(search_string.split()) @@ -156,7 +159,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode 
!= 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -170,7 +173,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index 2d51fafd4b..d761539208 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -32,7 +32,7 @@ class TorrentLeechProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """TorrentLeech Torrent provider""" def __init__(self): # Provider Init @@ -42,10 +42,6 @@ def __init__(self): self.username = None self.password = None - # Torrent Stats - self.minseed = None - self.minleech = None - # URLs self.url = 'https://torrentleech.org' self.urls = { @@ -56,6 +52,12 @@ def __init__(self): # Proper Strings self.proper_strings = ['PROPER', 'REPACK'] + # Miscellaneous Options + + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self) @@ -81,7 +83,15 @@ def login(self): return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + TorrentLeech search and parsing + + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ results = [] if not self.login(): return results @@ -102,13 +112,13 @@ def process_column_header(td): for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) categories = ['2', '7', '35'] categories += ['26', '32'] if mode == 'Episode' else ['27'] @@ -131,7 +141,7 @@ def process_column_header(td): torrent_table = html.find('table', id='torrenttable') torrent_rows = torrent_table('tr') if torrent_table else [] - # Continue only if at least one Release is found + # Continue only if at least one release is found if len(torrent_rows) < 2: logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue @@ -167,7 +177,7 @@ def process_column_header(td): 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py index a7ef07e76b..0f4ef98ed2 100644 --- a/sickbeard/providers/torrentproject.py +++ b/sickbeard/providers/torrentproject.py @@ -38,18 +38,19 @@ def __init__(self): # Credentials self.public = True - # Torrent Stats - self.minseed = None - self.minleech = None - # URLs self.url = 'https://torrentproject.se/' - self.custom_url = None - self.headers.update({'User-Agent': USER_AGENT}) # Proper Strings + # Miscellaneous Options + 
self.headers.update({'User-Agent': USER_AGENT}) + + # Torrent Stats + self.minseed = None + self.minleech = None + # Cache self.cache = tvcache.TVCache(self, search_params={'RSS': ['0day']}) @@ -65,13 +66,13 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: # Mode = RSS, Season, Episode items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - logger.log('Search string: {0}'.format(search_string.decode('utf-8')), - logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) search_params['s'] = search_string @@ -103,7 +104,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -118,7 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': torrent_hash + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py index 2507674603..f4588d5a7b 100644 --- a/sickbeard/providers/torrentz.py +++ b/sickbeard/providers/torrentz.py @@ -30,7 +30,7 @@ class TorrentzProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes - + """Torrentz Torrent provider""" def __init__(self): # Provider Init @@ -40,10 +40,6 @@ def __init__(self): self.public = True self.confirmed = True - # Torrent Stats - self.minseed = None - self.minleech = None - # URLs self.url = 'https://torrentz.eu/' self.urls = { @@ -51,9 +47,15 @@ def __init__(self): 'feed': 'https://torrentz.eu/feed', 'base': self.url, } - self.headers.update({'User-Agent': USER_AGENT}) # Proper Strings + self.headers.update({'User-Agent': USER_AGENT}) + + # Miscellaneous Options + + # Torrent Stats + self.minseed = None + self.minleech = None # Cache self.cache = tvcache.TVCache(self, min_time=15) # only poll Torrentz every 15 minutes max @@ -68,16 +70,17 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for mode in search_strings: items = [] - logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) + for search_string in search_strings[mode]: search_url = self.urls['verified'] if self.confirmed else self.urls['feed'] if mode != 'RSS': - logger.log('Search string: {0}'.format - (search_string), logger.DEBUG) + logger.log('Search string: {search}'.format + (search=search_string), logger.DEBUG) data = self.get_url(search_url, params={'q': search_string}, returns='text') if not data: - logger.log("No data returned from provider", logger.DEBUG) + logger.log('No data returned from provider', logger.DEBUG) continue if not data.startswith(" Date: Sat, 18 Jun 2016 09:06:54 -0400 Subject: [PATCH 073/134] Standardize method names and order --- sickbeard/__init__.py | 2 +- sickbeard/providers/abnormal.py | 40 +-- sickbeard/providers/alpharatio.py | 46 +-- sickbeard/providers/anizb.py | 8 +- sickbeard/providers/bluetigers.py | 49 ++- sickbeard/providers/btn.py | 130 ++++---- sickbeard/providers/danishbits.py | 50 +-- 
 sickbeard/providers/freshontv.py | 102 +++---
 sickbeard/providers/gftracker.py | 60 ++--
 sickbeard/providers/hd4free.py | 13 +-
 sickbeard/providers/hdbits.py | 58 ++--
 sickbeard/providers/hdspace.py | 62 ++--
 sickbeard/providers/hdtorrents.py | 58 ++--
 sickbeard/providers/hounddawgs.py | 50 +--
 sickbeard/providers/ilovetorrents.py | 56 ++--
 sickbeard/providers/iptorrents.py | 74 ++---
 sickbeard/providers/morethantv.py | 60 ++--
 sickbeard/providers/newpct.py | 58 ++--
 sickbeard/providers/newznab.py | 389 ++++++++++++-----------
 sickbeard/providers/norbits.py | 38 +--
 sickbeard/providers/omgwtfnzbs.py | 90 +++---
 sickbeard/providers/pretome.py | 56 ++--
 sickbeard/providers/rarbg.py | 38 +--
 sickbeard/providers/rsstorrent.py | 52 +--
 sickbeard/providers/scc.py | 54 ++--
 sickbeard/providers/scenetime.py | 40 +--
 sickbeard/providers/shazbat.py | 4 +-
 sickbeard/providers/speedcd.py | 40 +--
 sickbeard/providers/t411.py | 50 +--
 sickbeard/providers/tntvillage.py | 152 ++++-----
 sickbeard/providers/torrentbytes.py | 42 +--
 sickbeard/providers/torrentday.py | 74 ++---
 sickbeard/providers/torrentleech.py | 44 +--
 sickbeard/providers/torrentz.py | 10 +-
 sickbeard/providers/transmitthenet.py | 60 ++--
 sickbeard/providers/tvchaosuk.py | 60 ++--
 sickbeard/providers/xthor.py | 42 +--
 sickbeard/server/web/config/providers.py | 12 +-
 38 files changed, 1161 insertions(+), 1162 deletions(-)

diff --git a/sickbeard/__init__.py
index 809da9f5eb..95a6e1e01f 100644
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -2189,7 +2189,7 @@ def save_config():  # pylint: disable=too-many-statements, too-many-branches
     new_config['Newznab']['newznab_data'] = NEWZNAB_DATA
     new_config['TorrentRss'] = {}
-    new_config['TorrentRss']['torrentrss_data'] = '!!!'.join([x.configStr() for x in torrentRssProviderList])
+    new_config['TorrentRss']['torrentrss_data'] = '!!!'.join([x.config_string() for x in torrentRssProviderList])
     new_config['GUI'] = {}
     new_config['GUI']['gui_name'] = GUI_NAME

diff --git a/sickbeard/providers/abnormal.py
index aed5692913..19de730f04 100644
--- a/sickbeard/providers/abnormal.py
+++ b/sickbeard/providers/abnormal.py
@@ -61,26 +61,6 @@ def __init__(self):
         # Cache
         self.cache = tvcache.TVCache(self, min_time=30)
-    def login(self):
-        if any(dict_from_cookiejar(self.session.cookies).values()):
-            return True
-
-        login_params = {
-            'username': self.username,
-            'password': self.password,
-        }
-
-        response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
-        if not response:
-            logger.log('Unable to connect to provider', logger.WARNING)
-            return False
-
-        if not re.search('torrents.php', response):
-            logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + provider = ABNormalProvider() diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index 5be2d498fc..0f5934c380 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -61,29 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login': 'submit', - 'remember_me': 'on', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Invalid Username/password', response) \ - or re.search('Login :: AlphaRatio.cc', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ AlphaRatio search and parsing @@ -197,5 +174,28 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'remember_me': 'on', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Invalid Username/password', response) \ + or re.search('Login :: AlphaRatio.cc', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + provider = AlphaRatioProvider() diff --git a/sickbeard/providers/anizb.py b/sickbeard/providers/anizb.py index a97efe0221..c39d3f6426 100644 --- a/sickbeard/providers/anizb.py +++ b/sickbeard/providers/anizb.py @@ -58,10 +58,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _get_size(self, item): - """Override the default _get_size to prevent it from extracting using it the default tags""" - return try_int(item.get('size')) - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals """Start searching for anime using the provided search_strings. 
Used for backlog and daily""" results = [] @@ -120,5 +116,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def _get_size(self, item): + """Override the default _get_size to prevent it from extracting using it the default tags""" + return try_int(item.get('size')) + provider = Anizb() diff --git a/sickbeard/providers/bluetigers.py b/sickbeard/providers/bluetigers.py index 8a0808fa83..5c12dcb83d 100644 --- a/sickbeard/providers/bluetigers.py +++ b/sickbeard/providers/bluetigers.py @@ -62,31 +62,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll BLUETIGERS every 10 minutes max - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'take_login': '1' - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - check_login = self.get_url(self.urls['base_url'], returns='text') - if re.search('account-logout.php', check_login): - return True - else: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('account-login.php', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals """ BLUETIGERS search and parsing @@ -180,5 +155,29 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'take_login': '1' + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + check_login = self.get_url(self.urls['base_url'], returns='text') + if re.search('account-logout.php', check_login): + return True + else: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('account-login.php', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True provider = BlueTigersProvider() diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index bdd84a1668..dce5604685 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -62,24 +62,6 @@ def __init__(self): # Cache self.cache = BTNCache(self, min_time=15) # Only poll BTN every 15 minutes max - def _check_auth(self): - if not self.api_key: - logger.log('Invalid api key. 
Check your settings', logger.WARNING) - - return True - - def _checkAuthFromData(self, parsed_json): - - if parsed_json is None: - return self._check_auth() - - if 'api-error' in parsed_json: - logger.log('Incorrect authentication credentials: %s' % parsed_json['api-error'], logger.DEBUG) - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many-locals """ BTN search and parsing @@ -109,7 +91,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many logger.log('No data returned from provider', logger.DEBUG) return results - if self._checkAuthFromData(parsed_json): + if self._check_auth_from_data(parsed_json): found_torrents = parsed_json.get('torrents', {}) @@ -143,38 +125,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many # FIXME SORT RESULTS return results - def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): - - server = jsonrpclib.Server(self.urls['base_url']) - parsed_json = {} - - try: - parsed_json = server.getTorrents(apikey, params or {}, int(results_per_page), int(offset)) - time.sleep(cpu_presets[sickbeard.CPU_PRESET]) + def _check_auth(self): + if not self.api_key: + logger.log('Invalid api key. Check your settings', logger.WARNING) - except jsonrpclib.jsonrpc.ProtocolError, error: - if error.message == 'Call Limit Exceeded': - logger.log('You have exceeded the limit of 150 calls per hour,' - ' per API key which is unique to your user account', logger.WARNING) - else: - logger.log('JSON-RPC protocol error while accessing provicer. Error: %s ' % repr(error), logger.ERROR) - parsed_json = {'api-error': ex(error)} - return parsed_json + return True - except socket.timeout: - logger.log('Timeout while accessing provider', logger.WARNING) + def _check_auth_from_data(self, parsed_json): - except socket.error, error: - # Note that sometimes timeouts are thrown as socket errors - logger.log('Socket error while accessing provider. Error: %s ' % error[1], logger.WARNING) + if parsed_json is None: + return self._check_auth() - except Exception, error: - errorstring = str(error) - if errorstring.startswith('<') and errorstring.endswith('>'): - errorstring = errorstring[1:-1] - logger.log('Unknown error while accessing provider. 
Error: %s ' % errorstring, logger.WARNING) + if 'api-error' in parsed_json: + logger.log('Incorrect authentication credentials: %s' % parsed_json['api-error'], logger.DEBUG) + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) - return parsed_json + return True def _get_title_and_url(self, parsed_json): @@ -210,6 +177,26 @@ def _get_title_and_url(self, parsed_json): return title, url + def find_propers(self, search_date=None): + results = [] + + search_terms = ['%.proper.%', '%.repack.%'] + + for term in search_terms: + for item in self.search({'release': term}, age=4 * 24 * 60 * 60): + if item['Time']: + try: + result_date = datetime.fromtimestamp(float(item['Time'])) + except TypeError: + result_date = None + + if result_date: + if not search_date or result_date > search_date: + title, url = self._get_title_and_url(item) + results.append(classes.Proper(title, url, result_date, self.show)) + + return results + def _get_season_search_strings(self, ep_obj): search_params = [] current_params = {'category': 'Season'} @@ -272,30 +259,43 @@ def _get_episode_search_strings(self, ep_obj, add_string=''): return to_return - def _doGeneralSearch(self, search_string): - # 'search' looks as broad is it can find. Can contain episode overview and title for example, - # use with caution! - return self.search({'search': search_string}) + def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): - def find_propers(self, search_date=None): - results = [] + server = jsonrpclib.Server(self.urls['base_url']) + parsed_json = {} - search_terms = ['%.proper.%', '%.repack.%'] + try: + parsed_json = server.getTorrents(apikey, params or {}, int(results_per_page), int(offset)) + time.sleep(cpu_presets[sickbeard.CPU_PRESET]) - for term in search_terms: - for item in self.search({'release': term}, age=4 * 24 * 60 * 60): - if item['Time']: - try: - result_date = datetime.fromtimestamp(float(item['Time'])) - except TypeError: - result_date = None + except jsonrpclib.jsonrpc.ProtocolError, error: + if error.message == 'Call Limit Exceeded': + logger.log('You have exceeded the limit of 150 calls per hour,' + ' per API key which is unique to your user account', logger.WARNING) + else: + logger.log('JSON-RPC protocol error while accessing provicer. Error: %s ' % repr(error), logger.ERROR) + parsed_json = {'api-error': ex(error)} + return parsed_json - if result_date: - if not search_date or result_date > search_date: - title, url = self._get_title_and_url(item) - results.append(classes.Proper(title, url, result_date, self.show)) + except socket.timeout: + logger.log('Timeout while accessing provider', logger.WARNING) - return results + except socket.error, error: + # Note that sometimes timeouts are thrown as socket errors + logger.log('Socket error while accessing provider. Error: %s ' % error[1], logger.WARNING) + + except Exception, error: + errorstring = str(error) + if errorstring.startswith('<') and errorstring.endswith('>'): + errorstring = errorstring[1:-1] + logger.log('Unknown error while accessing provider. Error: %s ' % errorstring, logger.WARNING) + + return parsed_json + + def _do_general_search(self, search_string): + # 'search' looks as broad is it can find. Can contain episode overview and title for example, + # use with caution! 
+ return self.search({'search': search_string}) class BTNCache(tvcache.TVCache): diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py index 3c4f834d79..de2f4a62ba 100644 --- a/sickbeard/providers/danishbits.py +++ b/sickbeard/providers/danishbits.py @@ -62,31 +62,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll Danishbits every 10 minutes max - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'keeplogged': 1, - 'langlang': '', - 'login': 'Login', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - self.session.cookies.clear() - return False - - if 'Login :: Danishbits.org' in response: - logger.log('Invalid username or password. Check your settings', logger.WARNING) - self.session.cookies.clear() - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ DanishBits search and parsing @@ -200,5 +175,30 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'keeplogged': 1, + 'langlang': '', + 'login': 'Login', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + self.session.cookies.clear() + return False + + if 'Login :: Danishbits.org' in response: + logger.log('Invalid username or password. Check your settings', logger.WARNING) + self.session.cookies.clear() + return False + + return True + provider = DanishbitsProvider() diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index c09d9b268d..cddf247320 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -68,57 +68,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - - if not self.username or not self.password: - logger.log('Invalid username or password. Check your settings', logger.WARNING) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login': 'submit', - 'action': 'makelogin', - } - - if self._uid and self._hash: - add_dict_to_cookiejar(self.session.cookies, self.cookies) - else: - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('/logout.php', response): - try: - if dict_from_cookiejar(self.session.cookies)['uid'] and \ - dict_from_cookiejar(self.session.cookies)['pass']: - self._uid = dict_from_cookiejar(self.session.cookies)['uid'] - self._hash = dict_from_cookiejar(self.session.cookies)['pass'] - - self.cookies = {'uid': self._uid, - 'pass': self._hash} - return True - except Exception: - logger.log('Unable to login to provider (cookie)', logger.WARNING) - - return False - else: - if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response) or \ - re.search('Username or password is incorrect. 
If you have an account here please use the' - ' recovery system or try again.', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - - if re.search('DDoS protection by CloudFlare', response): - logger.log('Unable to login to provider due to CloudFlare DDoS javascript check', logger.WARNING) - - return False - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): @@ -253,5 +202,56 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'action': 'makelogin', + } + + if self._uid and self._hash: + add_dict_to_cookiejar(self.session.cookies, self.cookies) + else: + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('/logout.php', response): + try: + if dict_from_cookiejar(self.session.cookies)['uid'] and \ + dict_from_cookiejar(self.session.cookies)['pass']: + self._uid = dict_from_cookiejar(self.session.cookies)['uid'] + self._hash = dict_from_cookiejar(self.session.cookies)['pass'] + + self.cookies = {'uid': self._uid, + 'pass': self._hash} + return True + except Exception: + logger.log('Unable to login to provider (cookie)', logger.WARNING) + + return False + else: + if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response) or \ + re.search('Username or password is incorrect. If you have an account here please use the' + ' recovery system or try again.', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + + if re.search('DDoS protection by CloudFlare', response): + logger.log('Unable to login to provider due to CloudFlare DDoS javascript check', logger.WARNING) + + return False + + def _check_auth(self): + + if not self.username or not self.password: + logger.log('Invalid username or password. Check your settings', logger.WARNING) + + return True + provider = FreshOnTVProvider() diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index 18dd66e75a..a5d0110aa5 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -63,36 +63,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - } - - # Initialize session with a GET to have cookies - self.get_url(self.url, returns='text') - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username or password incorrect', response): - logger.log('Invalid username or password. 
Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ GFT search and parsing @@ -208,5 +178,35 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + } + + # Initialize session with a GET to have cookies + self.get_url(self.url, returns='text') + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + + return True + provider = GFTrackerProvider() diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index 306f924e38..02e0ba2a28 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -57,13 +57,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll HD4Free every 10 minutes max - def _check_auth(self): - if self.username and self.api_key: - return True - - logger.log('Your authentication credentials for %s are missing, check your config.' % self.name, logger.WARNING) - return False - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ HD4Free search and parsing @@ -165,5 +158,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def _check_auth(self): + if self.username and self.api_key: + return True + + logger.log('Your authentication credentials for %s are missing, check your config.' % self.name, logger.WARNING) + return False provider = HD4FreeProvider() diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index 4c3ed2e61c..1107992e93 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -56,35 +56,6 @@ def __init__(self): # Cache self.cache = HDBitsCache(self, min_time=15) # only poll HDBits every 15 minutes max - def _check_auth(self): - - if not self.username or not self.passkey: - raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') - - return True - - def _check_auth_from_data(self, parsed_json): - - if 'status' in parsed_json and 'message' in parsed_json: - if parsed_json.get('status') == 5: - logger.log('Invalid username or password. Check your settings', logger.WARNING) - - return True - - def _get_season_search_strings(self, ep_obj): - season_search_string = [self._make_post_data_json(show=ep_obj.show, season=ep_obj)] - return season_search_string - - def _get_episode_search_strings(self, ep_obj, add_string=''): - episode_search_string = [self._make_post_data_json(show=ep_obj.show, episode=ep_obj)] - return episode_search_string - - def _get_title_and_url(self, item): - title = item.get('name', '').replace(' ', '.') - url = self.urls['download'] + '?' 
+ urlencode({'id': item['id'], 'passkey': self.passkey}) - - return title, url - def search(self, search_strings, age=0, ep_obj=None): # FIXME @@ -110,6 +81,27 @@ def search(self, search_strings, age=0, ep_obj=None): # FIXME SORTING return results + def _check_auth(self): + + if not self.username or not self.passkey: + raise AuthException('Your authentication credentials for ' + self.name + ' are missing, check your config.') + + return True + + def _check_auth_from_data(self, parsed_json): + + if 'status' in parsed_json and 'message' in parsed_json: + if parsed_json.get('status') == 5: + logger.log('Invalid username or password. Check your settings', logger.WARNING) + + return True + + def _get_title_and_url(self, item): + title = item.get('name', '').replace(' ', '.') + url = self.urls['download'] + '?' + urlencode({'id': item['id'], 'passkey': self.passkey}) + + return title, url + def find_propers(self, search_date=None): results = [] @@ -130,6 +122,14 @@ def find_propers(self, search_date=None): return results + def _get_season_search_strings(self, ep_obj): + season_search_string = [self._make_post_data_json(show=ep_obj.show, season=ep_obj)] + return season_search_string + + def _get_episode_search_strings(self, ep_obj, add_string=''): + episode_search_string = [self._make_post_data_json(show=ep_obj.show, episode=ep_obj)] + return episode_search_string + def _make_post_data_json(self, show=None, episode=None, season=None, search_term=None): post_data = { diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index d58f71e753..da7b6757b1 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -70,37 +70,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # only poll HDSpace every 10 minutes max - def _check_auth(self): - - if not self.username or not self.password: - logger.log('Invalid username or password. Check your settings', logger.WARNING) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - if 'pass' in dict_from_cookiejar(self.session.cookies): - return True - - login_params = { - 'uid': self.username, - 'pwd': self.password, - 'page': 'login', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Password Incorrect', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ HDSpace search and parsing @@ -201,5 +170,36 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + if 'pass' in dict_from_cookiejar(self.session.cookies): + return True + + login_params = { + 'uid': self.username, + 'pwd': self.password, + 'page': 'login', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Password Incorrect', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + logger.log('Invalid username or password. Check your settings', logger.WARNING) + + return True + provider = HDSpaceProvider() diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index d7d4070628..cefd5b7d10 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -63,35 +63,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=30) - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'uid': self.username, - 'pwd': self.password, - 'submit': 'Confirm' - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('You need cookies enabled to log in.', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ HDTorrents search and parsing @@ -205,5 +176,34 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'uid': self.username, + 'pwd': self.password, + 'submit': 'Confirm' + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('You need cookies enabled to log in.', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + + return True + provider = HDTorrentsProvider() diff --git a/sickbeard/providers/hounddawgs.py b/sickbeard/providers/hounddawgs.py index 5ae3c86730..81a6717734 100644 --- a/sickbeard/providers/hounddawgs.py +++ b/sickbeard/providers/hounddawgs.py @@ -63,31 +63,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'keeplogged': 'on', - 'login': 'Login' - } - - self.get_url(self.urls['base_url'], returns='text') - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Dit brugernavn eller kodeord er forkert.', response) \ - or re.search('Login :: HoundDawgs', response) \ - or re.search('Dine cookies er ikke aktiveret.', response): - logger.log('Invalid username or password. 
Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ HoundDawgs search and parsing @@ -209,5 +184,30 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'keeplogged': 'on', + 'login': 'Login' + } + + self.get_url(self.urls['base_url'], returns='text') + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Dit brugernavn eller kodeord er forkert.', response) \ + or re.search('Login :: HoundDawgs', response) \ + or re.search('Dine cookies er ikke aktiveret.', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + provider = HoundDawgsProvider() diff --git a/sickbeard/providers/ilovetorrents.py b/sickbeard/providers/ilovetorrents.py index 9c681919f2..59f14d5a52 100644 --- a/sickbeard/providers/ilovetorrents.py +++ b/sickbeard/providers/ilovetorrents.py @@ -64,34 +64,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - if not self.username or not self.password: - logger.log(u'Invalid username or password. Check your settings', logger.WARNING) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'logout': 'false', - 'submit': 'Welcome to ILT' - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username or password incorrect', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ ILoveTorrents search and parsing @@ -188,5 +160,33 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'logout': 'false', + 'submit': 'Welcome to ILT' + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + if not self.username or not self.password: + logger.log(u'Invalid username or password. 
Check your settings', logger.WARNING) + + return True + provider = ILoveTorrentsProvider() diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index d7945fbe0b..788339e9ac 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -64,43 +64,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll IPTorrents every 10 minutes max - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login': 'submit', - } - - self.get_url(self.urls['login'], returns='text') - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - # Invalid username and password combination - if re.search('Invalid username and password combination', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - # You tried too often, please try again after 2 hours! - if re.search('You tried too often', response): - logger.log('You tried too often, please try again after 2 hours!' - ' Disable IPTorrents for at least 2 hours', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): @@ -182,5 +145,42 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'submit', + } + + self.get_url(self.urls['login'], returns='text') + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + # Invalid username and password combination + if re.search('Invalid username and password combination', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + # You tried too often, please try again after 2 hours! + if re.search('You tried too often', response): + logger.log('You tried too often, please try again after 2 hours!' 
+ ' Disable IPTorrents for at least 2 hours', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + + return True + provider = IPTorrentsProvider() diff --git a/sickbeard/providers/morethantv.py b/sickbeard/providers/morethantv.py index b4a08ba474..21c75ccabb 100644 --- a/sickbeard/providers/morethantv.py +++ b/sickbeard/providers/morethantv.py @@ -64,36 +64,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'keeplogged': '1', - 'login': 'Log in', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Your username or password was incorrect.', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ MoreThanTV search and parsing @@ -209,5 +179,35 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'keeplogged': '1', + 'login': 'Log in', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Your username or password was incorrect.', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + + return True + provider = MoreThanTVProvider() diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py index b41b37a38e..fd8a099ef0 100644 --- a/sickbeard/providers/newpct.py +++ b/sickbeard/providers/newpct.py @@ -126,7 +126,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man try: torrent_row = row.find('a') - title = self._processTitle(torrent_row.get('title', '')) + title = self._process_title(torrent_row.get('title', '')) download_url = torrent_row.get('href', '') if not all([title, download_url]): continue @@ -159,6 +159,34 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + @staticmethod + def _process_title(title): + # Remove 'Mas informacion sobre ' literal from title + title = title[22:] + + # Quality - Use re module to avoid case sensitive problems with replace + title = re.sub(r'\[HDTV 1080p[^\[]*]', '1080p HDTV x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[HDTV 720p[^\[]*]', '720p HDTV x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[ALTA DEFINICION 720p[^\[]*]', '720p HDTV x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[HDTV]', 'HDTV x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[DVD[^\[]*]', 'DVDrip x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[BluRay 1080p[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[BluRay MicroHD[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[MicroHD 1080p[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[BLuRay[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[BRrip[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) + title = re.sub(r'\[BDrip[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) + + # Language + title = re.sub(r'\[Spanish[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) + title = re.sub(r'\[Castellano[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) + title = re.sub(r'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) + title = re.sub(r'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) + + title += '-NEWPCT' + + return title.strip() + def get_url(self, url, post_data=None, params=None, timeout=30, **kwargs): # pylint: disable=too-many-arguments """ returns='content' when trying access to torrent info (For calling torrent client). 
Previously we must parse @@ -208,33 +236,5 @@ def download_result(self, result): return False - @staticmethod - def _processTitle(title): - # Remove 'Mas informacion sobre ' literal from title - title = title[22:] - - # Quality - Use re module to avoid case sensitive problems with replace - title = re.sub(r'\[HDTV 1080p[^\[]*]', '1080p HDTV x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[HDTV 720p[^\[]*]', '720p HDTV x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[ALTA DEFINICION 720p[^\[]*]', '720p HDTV x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[HDTV]', 'HDTV x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[DVD[^\[]*]', 'DVDrip x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[BluRay 1080p[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[BluRay MicroHD[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[MicroHD 1080p[^\[]*]', '1080p BlueRay x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[BLuRay[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[BRrip[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) - title = re.sub(r'\[BDrip[^\[]*]', '720p BlueRay x264', title, flags=re.IGNORECASE) - - # Language - title = re.sub(r'\[Spanish[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) - title = re.sub(r'\[Castellano[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) - title = re.sub(r'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) - title = re.sub(r'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.IGNORECASE) - - title += '-NEWPCT' - - return title.strip() - provider = newpctProvider() diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index d4c8e2b482..223e4eb170 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -76,199 +76,6 @@ def __init__(self, name, url, key='0', catIDs='5030,5040', search_mode='eponly', self.cache = tvcache.TVCache(self, min_time=30) # only poll newznab providers every 30 minutes max - def configStr(self): - """ - Generates a '|' delimited string of instance attributes, for saving to config.ini - """ - return '|'.join([ - self.name, self.url, self.key, self.catIDs, str(int(self.enabled)), - self.search_mode, str(int(self.search_fallback)), - str(int(self.enable_daily)), str(int(self.enable_backlog)), str(int(self.enable_manualsearch)) - ]) - - @staticmethod - def get_providers_list(data): - default_list = [ - provider for provider in - (NewznabProvider._make_provider(x) for x in NewznabProvider._get_default_providers().split('!!!')) - if provider] - - providers_list = [ - provider for provider in - (NewznabProvider._make_provider(x) for x in data.split('!!!')) - if provider] - - seen_values = set() - providers_set = [] - - for provider in providers_list: - value = provider.name - - if value not in seen_values: - providers_set.append(provider) - seen_values.add(value) - - providers_list = providers_set - providers_dict = dict(zip([provider.name for provider in providers_list], providers_list)) - - for default in default_list: - if not default: - continue - - if default.name not in providers_dict: - default.default = True - providers_list.append(default) - else: - providers_dict[default.name].default = True - providers_dict[default.name].name = default.name - providers_dict[default.name].url = default.url - providers_dict[default.name].needs_auth = default.needs_auth - providers_dict[default.name].search_mode = default.search_mode - 
providers_dict[default.name].search_fallback = default.search_fallback - providers_dict[default.name].enable_daily = default.enable_daily - providers_dict[default.name].enable_backlog = default.enable_backlog - providers_dict[default.name].enable_manualsearch = default.enable_manualsearch - - return [provider for provider in providers_list if provider] - - def image_name(self): - """ - Checks if we have an image for this provider already. - Returns found image or the default newznab image - """ - if ek(os.path.isfile, - ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', - self.get_id() + '.png')): - return self.get_id() + '.png' - return 'newznab.png' - - def set_caps(self, data): - if not data: - return - - def _parse_cap(tag): - elm = data.find(tag) - return elm.get('supportedparams', 'True') if elm and elm.get('available') else '' - - self.cap_tv_search = _parse_cap('tv-search') - # self.cap_search = _parse_cap('search') - # self.cap_movie_search = _parse_cap('movie-search') - # self.cap_audio_search = _parse_cap('audio-search') - - # self.caps = any([self.cap_tv_search, self.cap_search, self.cap_movie_search, self.cap_audio_search]) - self.caps = any([self.cap_tv_search]) - - def get_newznab_categories(self, just_caps=False): - """ - Uses the newznab provider url and apikey to get the capabilities. - Makes use of the default newznab caps param. e.a. http://yournewznab/api?t=caps&apikey=skdfiw7823sdkdsfjsfk - Returns a tuple with (succes or not, array with dicts [{'id': '5070', 'name': 'Anime'}, - {'id': '5080', 'name': 'Documentary'}, {'id': '5020', 'name': 'Foreign'}...etc}], error message) - """ - return_categories = [] - - if not self._check_auth(): - return False, return_categories, 'Provider requires auth and your key is not set' - - url_params = {'t': 'caps'} - if self.needs_auth and self.key: - url_params['apikey'] = self.key - - data = self.get_url(urljoin(self.url, 'api'), params=url_params, returns='text') - if not data: - error_string = 'Error getting caps xml for [{0}]'.format(self.name) - logger.log(error_string, logger.WARNING) - return False, return_categories, error_string - - with BS4Parser(data, 'html5lib') as html: - if not html.find('categories'): - error_string = 'Error parsing caps xml for [{0}]'.format(self.name) - logger.log(error_string, logger.DEBUG) - return False, return_categories, error_string - - self.set_caps(html.find('searching')) - if just_caps: - return - - for category in html('category'): - if 'TV' in category.get('name', '') and category.get('id', ''): - return_categories.append({'id': category['id'], 'name': category['name']}) - for subcat in category('subcat'): - if subcat.get('name', '') and subcat.get('id', ''): - return_categories.append({'id': subcat['id'], 'name': subcat['name']}) - - return True, return_categories, '' - - @staticmethod - def _get_default_providers(): - # name|url|key|catIDs|enabled|search_mode|search_fallback|enable_daily|enable_backlog|enable_manualsearch - return 'NZB.Cat|https://nzb.cat/||5030,5040,5010|0|eponly|0|0|0|0!!!' + \ - 'NZBGeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0|0!!!' + \ - 'NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0|0!!!' + \ - 'Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0|0!!!' 
+ \ - 'DOGnzb|https://api.dognzb.cr/||5030,5040,5060,5070|0|eponly|0|0|0|0' - - def _check_auth(self): - """ - Checks that user has set their api key if it is needed - Returns: True/False - """ - if self.needs_auth and not self.key: - logger.log('Invalid api key. Check your settings', logger.WARNING) - return False - - return True - - def _checkAuthFromData(self, data): - """ - Checks that the returned data is valid - Returns: _check_auth if valid otherwise False if there is an error - """ - if data('categories') + data('item'): - return self._check_auth() - - try: - err_desc = data.error.attrs['description'] - if not err_desc: - raise - except (AttributeError, TypeError): - return self._check_auth() - - logger.log(ss(err_desc)) - - return False - - @staticmethod - def _make_provider(config): - if not config: - return None - - try: - values = config.split('|') - # Pad values with None for each missing value - values.extend([None for x in range(len(values), 10)]) - - (name, url, key, category_ids, enabled, - search_mode, search_fallback, - enable_daily, enable_backlog, enable_manualsearch - ) = values - - except ValueError: - logger.log('Skipping Newznab provider string: {config!r}, incorrect format'.format - (config=config), logger.ERROR) - return None - - new_provider = NewznabProvider( - name, url, key=key, catIDs=category_ids, - search_mode=search_mode or 'eponly', - search_fallback=search_fallback or 0, - enable_daily=enable_daily or 0, - enable_backlog=enable_backlog or 0, - enable_manualsearch=enable_manualsearch or 0) - new_provider.enabled = enabled == '1' - - return new_provider - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-arguments, too-many-locals, too-many-branches, too-many-statements """ Searches indexer using the params in search_strings, either for latest releases, or a string/id search @@ -331,7 +138,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man break with BS4Parser(data, 'html5lib') as html: - if not self._checkAuthFromData(html): + if not self._check_auth_from_data(html): break try: @@ -406,9 +213,203 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def _check_auth(self): + """ + Checks that user has set their api key if it is needed + Returns: True/False + """ + if self.needs_auth and not self.key: + logger.log('Invalid api key. 
Check your settings', logger.WARNING) + return False + + return True + + def _check_auth_from_data(self, data): + """ + Checks that the returned data is valid + Returns: _check_auth if valid otherwise False if there is an error + """ + if data('categories') + data('item'): + return self._check_auth() + + try: + err_desc = data.error.attrs['description'] + if not err_desc: + raise + except (AttributeError, TypeError): + return self._check_auth() + + logger.log(ss(err_desc)) + + return False + def _get_size(self, item): """ Gets size info from a result item Returns int size or -1 """ return try_int(item.get('size', -1), -1) + + def config_string(self): + """ + Generates a '|' delimited string of instance attributes, for saving to config.ini + """ + return '|'.join([ + self.name, self.url, self.key, self.catIDs, str(int(self.enabled)), + self.search_mode, str(int(self.search_fallback)), + str(int(self.enable_daily)), str(int(self.enable_backlog)), str(int(self.enable_manualsearch)) + ]) + + @staticmethod + def get_providers_list(data): + default_list = [ + provider for provider in + (NewznabProvider._make_provider(x) for x in NewznabProvider._get_default_providers().split('!!!')) + if provider] + + providers_list = [ + provider for provider in + (NewznabProvider._make_provider(x) for x in data.split('!!!')) + if provider] + + seen_values = set() + providers_set = [] + + for provider in providers_list: + value = provider.name + + if value not in seen_values: + providers_set.append(provider) + seen_values.add(value) + + providers_list = providers_set + providers_dict = dict(zip([provider.name for provider in providers_list], providers_list)) + + for default in default_list: + if not default: + continue + + if default.name not in providers_dict: + default.default = True + providers_list.append(default) + else: + providers_dict[default.name].default = True + providers_dict[default.name].name = default.name + providers_dict[default.name].url = default.url + providers_dict[default.name].needs_auth = default.needs_auth + providers_dict[default.name].search_mode = default.search_mode + providers_dict[default.name].search_fallback = default.search_fallback + providers_dict[default.name].enable_daily = default.enable_daily + providers_dict[default.name].enable_backlog = default.enable_backlog + providers_dict[default.name].enable_manualsearch = default.enable_manualsearch + + return [provider for provider in providers_list if provider] + + def image_name(self): + """ + Checks if we have an image for this provider already. 
+ Returns found image or the default newznab image + """ + if ek(os.path.isfile, + ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', + self.get_id() + '.png')): + return self.get_id() + '.png' + return 'newznab.png' + + @staticmethod + def _make_provider(config): + if not config: + return None + + try: + values = config.split('|') + # Pad values with None for each missing value + values.extend([None for x in range(len(values), 10)]) + + (name, url, key, category_ids, enabled, + search_mode, search_fallback, + enable_daily, enable_backlog, enable_manualsearch + ) = values + + except ValueError: + logger.log('Skipping Newznab provider string: {config!r}, incorrect format'.format + (config=config), logger.ERROR) + return None + + new_provider = NewznabProvider( + name, url, key=key, catIDs=category_ids, + search_mode=search_mode or 'eponly', + search_fallback=search_fallback or 0, + enable_daily=enable_daily or 0, + enable_backlog=enable_backlog or 0, + enable_manualsearch=enable_manualsearch or 0) + new_provider.enabled = enabled == '1' + + return new_provider + + def set_caps(self, data): + if not data: + return + + def _parse_cap(tag): + elm = data.find(tag) + return elm.get('supportedparams', 'True') if elm and elm.get('available') else '' + + self.cap_tv_search = _parse_cap('tv-search') + # self.cap_search = _parse_cap('search') + # self.cap_movie_search = _parse_cap('movie-search') + # self.cap_audio_search = _parse_cap('audio-search') + + # self.caps = any([self.cap_tv_search, self.cap_search, self.cap_movie_search, self.cap_audio_search]) + self.caps = any([self.cap_tv_search]) + + def get_newznab_categories(self, just_caps=False): + """ + Uses the newznab provider url and apikey to get the capabilities. + Makes use of the default newznab caps param. e.a. http://yournewznab/api?t=caps&apikey=skdfiw7823sdkdsfjsfk + Returns a tuple with (succes or not, array with dicts [{'id': '5070', 'name': 'Anime'}, + {'id': '5080', 'name': 'Documentary'}, {'id': '5020', 'name': 'Foreign'}...etc}], error message) + """ + return_categories = [] + + if not self._check_auth(): + return False, return_categories, 'Provider requires auth and your key is not set' + + url_params = {'t': 'caps'} + if self.needs_auth and self.key: + url_params['apikey'] = self.key + + data = self.get_url(urljoin(self.url, 'api'), params=url_params, returns='text') + if not data: + error_string = 'Error getting caps xml for [{0}]'.format(self.name) + logger.log(error_string, logger.WARNING) + return False, return_categories, error_string + + with BS4Parser(data, 'html5lib') as html: + if not html.find('categories'): + error_string = 'Error parsing caps xml for [{0}]'.format(self.name) + logger.log(error_string, logger.DEBUG) + return False, return_categories, error_string + + self.set_caps(html.find('searching')) + if just_caps: + return + + for category in html('category'): + if 'TV' in category.get('name', '') and category.get('id', ''): + return_categories.append({'id': category['id'], 'name': category['name']}) + for subcat in category('subcat'): + if subcat.get('name', '') and subcat.get('id', ''): + return_categories.append({'id': subcat['id'], 'name': subcat['name']}) + + return True, return_categories, '' + + @staticmethod + def _get_default_providers(): + # name|url|key|catIDs|enabled|search_mode|search_fallback|enable_daily|enable_backlog|enable_manualsearch + return 'NZB.Cat|https://nzb.cat/||5030,5040,5010|0|eponly|0|0|0|0!!!' 
+ \ + 'NZBGeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0|0!!!' + \ + 'NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0|0!!!' + \ + 'Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0|0!!!' + \ + 'DOGnzb|https://api.dognzb.cr/||5030,5040,5060,5070|0|eponly|0|0|0|0' + diff --git a/sickbeard/providers/norbits.py b/sickbeard/providers/norbits.py index 7b7cb9307d..6a0a0f28b3 100644 --- a/sickbeard/providers/norbits.py +++ b/sickbeard/providers/norbits.py @@ -61,24 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=20) # only poll Norbits every 15 minutes max - def _check_auth(self): - - if not self.username or not self.passkey: - raise AuthException(('Your authentication credentials for %s are ' - 'missing, check your config.') % self.name) - - return True - - def _checkAuthFromData(self, parsed_json): # pylint: disable=invalid-name - """ Check that we are authenticated. """ - - if 'status' in parsed_json and 'message' in parsed_json: - if parsed_json.get('status') == 3: - logger.log('Invalid username or password. ' - 'Check your settings', logger.WARNING) - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals """ Do the actual searching and JSON parsing""" @@ -109,7 +91,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not parsed_json: return results - if self._checkAuthFromData(parsed_json): + if self._check_auth_from_data(parsed_json): json_items = parsed_json.get('data', '') if not json_items: logger.log('Resulting JSON from provider is not correct, ' @@ -163,5 +145,23 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def _check_auth(self): + + if not self.username or not self.passkey: + raise AuthException(('Your authentication credentials for %s are ' + 'missing, check your config.') % self.name) + + return True + + def _check_auth_from_data(self, parsed_json): # pylint: disable=invalid-name + """ Check that we are authenticated. """ + + if 'status' in parsed_json and 'message' in parsed_json: + if parsed_json.get('status') == 3: + logger.log('Invalid username or password. ' + 'Check your settings', logger.WARNING) + + return True + provider = NorbitsProvider() # pylint: disable=invalid-name diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index ee34ace9c9..b56c959048 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -54,50 +54,6 @@ def __init__(self): # Cache self.cache = OmgwtfnzbsCache(self) - def _check_auth(self): - - if not self.username or not self.api_key: - logger.log('Invalid api key. Check your settings', logger.WARNING) - return False - - return True - - def _checkAuthFromData(self, parsed_data, is_XML=True): - - if not parsed_data: - return self._check_auth() - - if is_XML: - # provider doesn't return xml on error - return True - - if 'notice' in parsed_data: - description_text = parsed_data.get('notice') - if 'information is incorrect' in description_text: - logger.log('Invalid api key. 
Check your settings', logger.WARNING) - elif '0 results matched your terms' not in description_text: - logger.log('Unknown error: {0}'.format(description_text), logger.DEBUG) - return False - - return True - - def _get_title_and_url(self, item): - return item['release'], item['getnzb'] - - def _get_size(self, item): - size = item.get('sizebytes', -1) - - # Try to get the size from the summary tag - if size == -1: - # Units - units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] - summary = item.get('summary') - if summary: - size_match = re.search(r'Size[^\d]*([0-9.]*.[A-Z]*)', summary) - size = convert_size(size_match.group(1), units=units) or -1 if size_match else -1 - - return try_int(size) - def search(self, search_strings, age=0, ep_obj=None): results = [] if not self._check_auth(): @@ -126,7 +82,7 @@ def search(self, search_strings, age=0, ep_obj=None): logger.log('No data returned from provider', logger.DEBUG) continue - if not self._checkAuthFromData(data, is_XML=False): + if not self._check_auth_from_data(data, is_XML=False): continue for item in data: @@ -145,6 +101,50 @@ def search(self, search_strings, age=0, ep_obj=None): return results + def _check_auth(self): + + if not self.username or not self.api_key: + logger.log('Invalid api key. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth_from_data(self, parsed_data, is_XML=True): + + if not parsed_data: + return self._check_auth() + + if is_XML: + # provider doesn't return xml on error + return True + + if 'notice' in parsed_data: + description_text = parsed_data.get('notice') + if 'information is incorrect' in description_text: + logger.log('Invalid api key. Check your settings', logger.WARNING) + elif '0 results matched your terms' not in description_text: + logger.log('Unknown error: {0}'.format(description_text), logger.DEBUG) + return False + + return True + + def _get_title_and_url(self, item): + return item['release'], item['getnzb'] + + def _get_size(self, item): + size = item.get('sizebytes', -1) + + # Try to get the size from the summary tag + if size == -1: + # Units + units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] + summary = item.get('summary') + if summary: + size_match = re.search(r'Size[^\d]*([0-9.]*.[A-Z]*)', summary) + size = convert_size(size_match.group(1), units=units) or -1 if size_match else -1 + + return try_int(size) + class OmgwtfnzbsCache(tvcache.TVCache): def _get_title_and_url(self, item): diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index 59af9edf38..dbcda2881d 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -67,34 +67,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - - if not self.username or not self.password or not self.pin: - logger.log('Invalid username or password or pin. Check your settings', logger.WARNING) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login_pin': self.pin, - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username or password incorrect', response): - logger.log('Invalid username or password. 
Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ Pretome search and parsing @@ -194,5 +166,33 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login_pin': self.pin, + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password or not self.pin: + logger.log('Invalid username or password or pin. Check your settings', logger.WARNING) + + return True + provider = PretomeProvider() diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index fa376ba62c..618f408065 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -61,25 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # only poll RARBG every 10 minutes max - def login(self): - if self.token and self.token_expires and datetime.datetime.now() < self.token_expires: - return True - - login_params = { - 'get_token': 'get_token', - 'format': 'json', - 'app_id': 'sickrage2', - } - - response = self.get_url(self.urls['api'], params=login_params, returns='json') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - self.token = response.get('token') - self.token_expires = datetime.datetime.now() + datetime.timedelta(minutes=14) if self.token else None - return self.token is not None - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-branches, too-many-locals, too-many-statements """ RARBG search and parsing @@ -213,5 +194,24 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if self.token and self.token_expires and datetime.datetime.now() < self.token_expires: + return True + + login_params = { + 'get_token': 'get_token', + 'format': 'json', + 'app_id': 'sickrage2', + } + + response = self.get_url(self.urls['api'], params=login_params, returns='json') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + self.token = response.get('token') + self.token_expires = datetime.datetime.now() + datetime.timedelta(minutes=14) if self.token else None + return self.token is not None + provider = RarbgProvider() diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index d236a17e54..bb8236b04f 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -55,7 +55,29 @@ def __init__(self, name, url, cookies='', # pylint: disable=too-many-arguments self.cookies = cookies self.titleTAG = titleTAG - def configStr(self): # pylint: disable=too-many-arguments + def _get_title_and_url(self, item): + + title = item.get(self.titleTAG, '').replace(' ', '.') + + attempt_list = [ + lambda: item.get('torrent_magneturi'), + lambda: item.enclosures[0].href, + lambda: item.get('link') + ] + + url = None + for cur_attempt in 
attempt_list: + try: + url = cur_attempt() + except Exception: + continue + + if title and url: + break + + return title, url + + def config_string(self): # pylint: disable=too-many-arguments return '{}|{}|{}|{}|{}|{}|{}|{}|{}'.format( self.name or '', self.url or '', @@ -89,28 +111,6 @@ def image_name(self): return self.get_id() + '.png' return 'torrentrss.png' - def _get_title_and_url(self, item): - - title = item.get(self.titleTAG, '').replace(' ', '.') - - attempt_list = [ - lambda: item.get('torrent_magneturi'), - lambda: item.enclosures[0].href, - lambda: item.get('link') - ] - - url = None - for cur_attempt in attempt_list: - try: - url = cur_attempt() - except Exception: - continue - - if title and url: - break - - return title, url - @staticmethod def _make_provider(config): if not config: @@ -149,7 +149,7 @@ def _make_provider(config): return new_provider - def validateRSS(self): # pylint: disable=too-many-return-statements + def validate_rss(self): # pylint: disable=too-many-return-statements try: if self.cookies: @@ -179,7 +179,7 @@ def validateRSS(self): # pylint: disable=too-many-return-statements try: bdecode(torrent_file) except Exception as error: - self.dumpHTML(torrent_file) + self.dump_html(torrent_file) return False, 'Torrent link is not a valid torrent file: {0}'.format(ex(error)) return True, 'RSS feed Parsed correctly' @@ -188,7 +188,7 @@ def validateRSS(self): # pylint: disable=too-many-return-statements return False, 'Error when trying to load RSS: {0}'.format(ex(error)) @staticmethod - def dumpHTML(data): + def dump_html(data): dump_name = ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html') try: diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 4782570a5f..2d04053fa5 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -69,33 +69,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) # only poll SCC every 20 minutes max - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'submit': 'come on in', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search(r'Username or password incorrect', response) \ - or re.search(r'SceneAccess \| Login', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - - @staticmethod - def _isSection(section, text): - title = r'.+? \| %s' % section - return re.search(title, text, re.IGNORECASE) - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals,too-many-branches, too-many-statements results = [] if not self.login(): @@ -178,5 +151,32 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'submit': 'come on in', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search(r'Username or password incorrect', response) \ + or re.search(r'SceneAccess \| Login', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + + @staticmethod + def _is_section(section, text): + title = r'.+? \| %s' % section + return re.search(title, text, re.IGNORECASE) + provider = SCCProvider() diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 458408e5dd..1299c771d9 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -64,26 +64,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) # only poll SceneTime every 20 minutes max - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username or password incorrect', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ SceneTime search and parsing @@ -181,5 +161,25 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + provider = SceneTimeProvider() diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py index 5303da6b19..8e0b1835ef 100644 --- a/sickbeard/providers/shazbat.py +++ b/sickbeard/providers/shazbat.py @@ -62,7 +62,7 @@ def _check_auth(self): return True - def _checkAuthFromData(self, data): + def _check_auth_from_data(self, data): if not self.passkey: self._check_auth() elif data.get('bozo') == 1 and not (data['entries'] and data['feed']): @@ -83,7 +83,7 @@ def _getRSSData(self): return self.getRSSFeed(self.provider.urls['rss_recent'], params=params) def _checkAuth(self, data): - return self.provider._checkAuthFromData(data) # pylint: disable=protected-access + return self.provider._check_auth_from_data(data) # pylint: disable=protected-access provider = ShazbatProvider() diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 19ca6efe65..aa9e0458b9 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -62,26 +62,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Incorrect username or Password. Please try again.', response): - logger.log('Invalid username or password. 
Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches results = [] if not self.login(): @@ -194,5 +174,25 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Incorrect username or Password. Please try again.', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + provider = SpeedCDProvider() diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 3a759893f7..4ce84bb94c 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -69,31 +69,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll T411 every 10 minutes max - def login(self): - if self.token is not None: - if time.time() < (self.tokenLastUpdate + 30 * 60): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - } - - response = self.get_url(self.urls['login_page'], post_data=login_params, returns='json') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if response and 'token' in response: - self.token = response['token'] - self.tokenLastUpdate = time.time() - # self.uid = response['uid'].encode('ascii', 'ignore') - self.session.auth = T411Auth(self.token) - return True - else: - logger.log('Token not found in authentication response', logger.WARNING) - return False - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ T411 search and parsing @@ -188,6 +163,31 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if self.token is not None: + if time.time() < (self.tokenLastUpdate + 30 * 60): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + } + + response = self.get_url(self.urls['login_page'], post_data=login_params, returns='json') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if response and 'token' in response: + self.token = response['token'] + self.tokenLastUpdate = time.time() + # self.uid = response['uid'].encode('ascii', 'ignore') + self.session.auth = T411Auth(self.token) + return True + else: + logger.log('Token not found in authentication response', logger.WARNING) + return False + class T411Auth(AuthBase): # pylint: disable=too-few-public-methods """Attaches HTTP Authentication to the given Request object.""" diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 3d3a8d32ac..773d9cbaaa 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -69,39 +69,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=30) # only poll TNTVillage every 30 minutes max - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if 
len(self.session.cookies) > 1: - cookies_dict = dict_from_cookiejar(self.session.cookies) - if cookies_dict['pass_hash'] != '0' and cookies_dict['member_id'] != '0': - return True - - login_params = { - 'UserName': self.username, - 'PassWord': self.password, - 'CookieDate': 1, - 'submit': 'Connettiti al Forum', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Sono stati riscontrati i seguenti errori', response) or \ - re.search('Connettiti', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ TNTVillage search and parsing @@ -160,7 +127,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man params = parse_qs(last_cell_anchor.get('href', '')) download_url = self.urls['download'].format(params['pid'][0]) if \ params.get('pid') else None - title = _normalize_title(cells[0], cells[1], mode) + title = self._process_title(cells[0], cells[1], mode) if not all([title, download_url]): continue @@ -178,7 +145,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue - if _has_only_subs(title) and not self.subtitle: + if self._has_only_subs(title) and not self.subtitle: logger.log('Torrent is only subtitled, skipping: {0}'.format (title), logger.DEBUG) continue @@ -209,55 +176,88 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if len(self.session.cookies) > 1: + cookies_dict = dict_from_cookiejar(self.session.cookies) + if cookies_dict['pass_hash'] != '0' and cookies_dict['member_id'] != '0': + return True -def _normalize_title(title, info, mode): - - result_title = title.find('a').get_text() - result_info = info.find('span') - - if not result_info: - return None - - bad_words = ['[cura]', 'hot', 'season', 'stagione', 'series', 'premiere', 'finale', 'fine', - 'full', 'Completa', 'supereroi', 'commedia', 'drammatico', 'poliziesco', 'azione', - 'giallo', 'politico', 'sitcom', 'funzionante'] + login_params = { + 'UserName': self.username, + 'PassWord': self.password, + 'CookieDate': 1, + 'submit': 'Connettiti al Forum', + } - formatted_info = '' - for info_part in result_info: - if mode == 'RSS': - try: - info_part = info_part.get('src') - info_part = info_part.replace('style_images/mkportal-636/', '') - info_part = info_part.replace('.gif', '').replace('.png', '') - if info_part == 'dolby': - info_part = 'Ac3' - elif info_part == 'fullHd': - info_part = '1080p' - except AttributeError: - info_part = info_part.replace('·', '').replace(',', '') - info_part = info_part.replace('by', '-').strip() - formatted_info += ' ' + info_part - else: - formatted_info = info_part + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False - allowed_words = [word for word in formatted_info.split() if word.lower() not in bad_words] - final_title = '{0} '.format(result_title) + ' '.join(allowed_words).strip('-').strip() + if re.search('Sono stati riscontrati i seguenti errori', response) or \ + re.search('Connettiti', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False - return final_title + return True + def _check_auth(self): -def _has_only_subs(title): + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) - title = title.lower() + return True - if 'sub' in title: - title = title.split() - counter = 0 - for word in title: - if 'ita' in word: - counter = counter + 1 - if counter < 2: - return True + @staticmethod + def _process_title(title, info, mode): + + result_title = title.find('a').get_text() + result_info = info.find('span') + + if not result_info: + return None + + bad_words = ['[cura]', 'hot', 'season', 'stagione', 'series', 'premiere', 'finale', 'fine', + 'full', 'Completa', 'supereroi', 'commedia', 'drammatico', 'poliziesco', 'azione', + 'giallo', 'politico', 'sitcom', 'funzionante'] + + formatted_info = '' + for info_part in result_info: + if mode == 'RSS': + try: + info_part = info_part.get('src') + info_part = info_part.replace('style_images/mkportal-636/', '') + info_part = info_part.replace('.gif', '').replace('.png', '') + if info_part == 'dolby': + info_part = 'Ac3' + elif info_part == 'fullHd': + info_part = '1080p' + except AttributeError: + info_part = info_part.replace('·', '').replace(',', '') + info_part = info_part.replace('by', '-').strip() + formatted_info += ' ' + info_part + else: + formatted_info = info_part + + allowed_words = [word for word in formatted_info.split() if word.lower() not in bad_words] + final_title = '{0} '.format(result_title) + ' '.join(allowed_words).strip('-').strip() + + return final_title + + @staticmethod + def _has_only_subs(title): + + title = title.lower() + + if 'sub' in title: + title = title.split() + counter = 0 + for word in title: + if 'ita' in word: + counter = counter + 1 + if counter < 2: + return True provider = TNTVillageProvider() diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index a5fc036270..5b580f3fe3 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -62,27 +62,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login': 'Log in!', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username or password incorrect', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] if not self.login(): @@ -181,5 +160,26 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'Log in!', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username or password incorrect', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + provider = TorrentBytesProvider() diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index 8aa01ff532..829ca1660f 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -67,43 +67,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=10) # Only poll IPTorrents every 10 minutes max - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - if self._uid and self._hash: - add_dict_to_cookiejar(self.session.cookies, self.cookies) - else: - - login_params = { - 'username': self.username, - 'password': self.password, - 'submit.x': 0, - 'submit.y': 0, - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('You tried too often', response): - logger.log('Too many login access attempts', logger.WARNING) - return False - - try: - if dict_from_cookiejar(self.session.cookies)['uid'] and dict_from_cookiejar(self.session.cookies)['pass']: - self._uid = dict_from_cookiejar(self.session.cookies)['uid'] - self._hash = dict_from_cookiejar(self.session.cookies)['pass'] - self.cookies = {'uid': self._uid, - 'pass': self._hash} - return True - except Exception: - pass - - logger.log('Unable to obtain cookie', logger.WARNING) - return False - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] if not self.login(): @@ -189,5 +152,42 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + if self._uid and self._hash: + add_dict_to_cookiejar(self.session.cookies, self.cookies) + else: + + login_params = { + 'username': self.username, + 'password': self.password, + 'submit.x': 0, + 'submit.y': 0, + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('You tried too often', response): + logger.log('Too many login access attempts', logger.WARNING) + return False + + try: + if dict_from_cookiejar(self.session.cookies)['uid'] and dict_from_cookiejar(self.session.cookies)['pass']: + self._uid = dict_from_cookiejar(self.session.cookies)['uid'] + self._hash = dict_from_cookiejar(self.session.cookies)['pass'] + self.cookies = {'uid': self._uid, + 'pass': self._hash} + return True + except Exception: + pass + + logger.log('Unable to obtain cookie', logger.WARNING) + return False + provider = TorrentDayProvider() diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index d761539208..b5ba0be432 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -61,28 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'login': 'submit', - 'remember_me': 'on', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Invalid Username/password', 
response) or re.search('Login :: TorrentLeech.org', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ TorrentLeech search and parsing @@ -193,5 +171,27 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'login': 'submit', + 'remember_me': 'on', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Invalid Username/password', response) or re.search('Login :: TorrentLeech.org', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + provider = TorrentLeechProvider() diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py index f4588d5a7b..267d430496 100644 --- a/sickbeard/providers/torrentz.py +++ b/sickbeard/providers/torrentz.py @@ -60,11 +60,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=15) # only poll Torrentz every 15 minutes max - @staticmethod - def _split_description(description): - match = re.findall(r'[0-9]+', description) - return int(match[0]) * 1024 ** 2, int(match[1]), int(match[2]) - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] @@ -135,5 +130,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + @staticmethod + def _split_description(description): + match = re.findall(r'[0-9]+', description) + return int(match[0]) * 1024 ** 2, int(match[1]), int(match[2]) + provider = TorrentzProvider() diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py index 77c6a40a09..c36fed6bd4 100644 --- a/sickbeard/providers/transmitthenet.py +++ b/sickbeard/providers/transmitthenet.py @@ -61,36 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - - if not self.username or not self.password: - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - return True - - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'keeplogged': 'on', - 'login': 'Login' - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Username Incorrect', response) or re.search('Password Incorrect', response): - logger.log('Invalid username or password. 
Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ TransmitTheNet search and parsing @@ -202,5 +172,35 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'keeplogged': 'on', + 'login': 'Login' + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Username Incorrect', response) or re.search('Password Incorrect', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + + if not self.username or not self.password: + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + + return True + provider = TransmitTheNetProvider() diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index 06b800d63f..ab39816ce4 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -61,36 +61,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self) - def _check_auth(self): - if self.username and self.password: - return True - - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) - - def login(self): - if len(self.session.cookies) >= 4: - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'logout': 'no', - 'submit': 'LOGIN', - 'returnto': '/browse.php', - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if re.search('Error: Username or password incorrect!', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ TVChaosUK search and parsing @@ -208,5 +178,35 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man return results + def login(self): + if len(self.session.cookies) >= 4: + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'logout': 'no', + 'submit': 'LOGIN', + 'returnto': '/browse.php', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Error: Username or password incorrect!', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + + def _check_auth(self): + if self.username and self.password: + return True + + raise AuthException('Your authentication credentials for {0} are missing,' + ' check your config.'.format(self.name)) + provider = TVChaosUKProvider() diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py index c0cd7f0655..01dd6c41b6 100644 --- a/sickbeard/providers/xthor.py +++ b/sickbeard/providers/xthor.py @@ -62,27 +62,6 @@ def __init__(self): # Cache self.cache = tvcache.TVCache(self, min_time=30) - def login(self): - if any(dict_from_cookiejar(self.session.cookies).values()): - return True - - login_params = { - 'username': self.username, - 'password': self.password, - 'submitme': 'X' - } - - response = self.get_url(self.urls['login'], post_data=login_params, returns='text') - if not response: - logger.log('Unable to connect to provider', logger.WARNING) - return False - - if not re.search('donate.php', response): - logger.log('Invalid username or password. Check your settings', logger.WARNING) - return False - - return True - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches """ Xthor search and parsing @@ -204,5 +183,26 @@ def process_column_header(td): return results + def login(self): + if any(dict_from_cookiejar(self.session.cookies).values()): + return True + + login_params = { + 'username': self.username, + 'password': self.password, + 'submitme': 'X' + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if not re.search('donate.php', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + return False + + return True + provider = XthorProvider() diff --git a/sickbeard/server/web/config/providers.py b/sickbeard/server/web/config/providers.py index 521b3fb5bc..b9627bb6e8 100644 --- a/sickbeard/server/web/config/providers.py +++ b/sickbeard/server/web/config/providers.py @@ -81,12 +81,12 @@ def saveNewznabProvider(name, url, key=''): else: provider_dict[name].needs_auth = True - return '|'.join([provider_dict[name].get_id(), provider_dict[name].configStr()]) + return '|'.join([provider_dict[name].get_id(), provider_dict[name].config_string()]) else: new_provider = newznab.NewznabProvider(name, url, key=key) sickbeard.newznabProviderList.append(new_provider) - return '|'.join([new_provider.get_id(), new_provider.configStr()]) + return '|'.join([new_provider.get_id(), new_provider.config_string()]) @staticmethod def getNewznabCategories(name, url, key): @@ -152,7 +152,7 @@ def canAddTorrentRssProvider(name, url, cookies, titleTAG): if temp_provider.get_id() in provider_dict: return json.dumps({'error': 'Exists as {name}'.format(name=provider_dict[temp_provider.get_id()].name)}) else: - (succ, err_msg) = temp_provider.validateRSS() + (succ, err_msg) = temp_provider.validate_rss() if succ: return json.dumps({'success': temp_provider.get_id()}) else: @@ -175,12 +175,12 @@ def saveTorrentRssProvider(name, url, cookies, titleTAG): provider_dict[name].cookies = cookies provider_dict[name].titleTAG = titleTAG - return '|'.join([provider_dict[name].get_id(), provider_dict[name].configStr()]) + return '|'.join([provider_dict[name].get_id(), provider_dict[name].config_string()]) else: new_provider = rsstorrent.TorrentRssProvider(name, url, cookies, titleTAG) sickbeard.torrentRssProviderList.append(new_provider) - return '|'.join([new_provider.get_id(), new_provider.configStr()]) + return '|'.join([new_provider.get_id(), new_provider.config_string()]) @staticmethod def deleteTorrentRssProvider(id): @@ -543,7 +543,7 @@ def saveProviders(self, newznab_string='', torrentrss_string='', provider_order= except (AttributeError, KeyError): curNzbProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes - sickbeard.NEWZNAB_DATA = '!!!'.join([x.configStr() for x in sickbeard.newznabProviderList]) + sickbeard.NEWZNAB_DATA = '!!!'.join([x.config_string() for x in sickbeard.newznabProviderList]) sickbeard.PROVIDER_ORDER = provider_list sickbeard.save_config() From 6dacb54a80db856ca2d4bcb2a452df93303f55d7 Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 18 Jun 2016 09:28:13 -0400 Subject: [PATCH 074/134] FIx missed URL join --- sickbeard/providers/scc.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 2d04053fa5..5e54cef129 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -44,13 +44,13 @@ def __init__(self): self.password = None # URLs - self.url = self.urls['base_url'] + self.url = 'https://sceneaccess.eu' self.urls = { - 'base_url': 'https://sceneaccess.eu', - 'login': 'https://sceneaccess.eu/login', - 'detail': 'https://www.sceneaccess.eu/details?id=%s', - 'search': 'https://sceneaccess.eu/all?search=%s&method=1&%s', - 'download': 'https://www.sceneaccess.eu/%s' + 'base_url': self.url, + 'login': urljoin(self.url, 'login'), + 'detail': urljoin(self.url, 'details?id=%s'), + 'search': urljoin(self.url, 'all?search=%s&method=1&%s'), + 'download': urljoin(self.url, '%s') } # Proper Strings From 
d7d9a2b70d47da66aa7fa1d0e92a8900a7e963dd Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 18 Jun 2016 09:50:06 -0400 Subject: [PATCH 075/134] Standardize string formatting --- sickbeard/providers/btdigg.py | 4 ++-- sickbeard/providers/btn.py | 18 +++++++++--------- sickbeard/providers/freshontv.py | 2 +- sickbeard/providers/hd4free.py | 3 ++- sickbeard/providers/kat.py | 2 +- sickbeard/providers/limetorrents.py | 2 +- sickbeard/providers/rarbg.py | 2 +- sickbeard/providers/scc.py | 2 +- sickbeard/providers/scenetime.py | 2 +- sickbeard/providers/speedcd.py | 2 +- sickbeard/providers/t411.py | 2 +- sickbeard/providers/thepiratebay.py | 2 +- sickbeard/providers/tokyotoshokan.py | 2 +- sickbeard/providers/torrentbytes.py | 2 +- sickbeard/providers/torrentday.py | 4 ++-- sickbeard/providers/torrentleech.py | 2 +- sickbeard/providers/torrentproject.py | 2 +- sickbeard/providers/torrentz.py | 2 +- sickbeard/providers/transmitthenet.py | 2 +- sickbeard/providers/tvchaosuk.py | 2 +- sickbeard/providers/xthor.py | 2 +- sickbeard/providers/zooqle.py | 4 ++-- 22 files changed, 34 insertions(+), 33 deletions(-) diff --git a/sickbeard/providers/btdigg.py b/sickbeard/providers/btdigg.py index 8e735c3710..d6ca2cb61a 100644 --- a/sickbeard/providers/btdigg.py +++ b/sickbeard/providers/btdigg.py @@ -115,8 +115,8 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" - ' minimum seeders: {0}. Seeders: {1})'.format + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index dce5604685..937b9f8cf6 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -84,7 +84,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many if search_strings: search_params.update(search_strings) - logger.log('Search string: %s' % search_strings, logger.DEBUG) + logger.log('Search string: {0}'.format(search_strings), logger.DEBUG) parsed_json = self._api_call(self.apikey, search_params) if not parsed_json: @@ -119,7 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many (title, url) = self._get_title_and_url(torrent_info) if title and url: - logger.log('Found result: %s ' % title, logger.DEBUG) + logger.log('Found result: {0} '.format(title), logger.DEBUG) results.append(torrent_info) # FIXME SORT RESULTS @@ -137,7 +137,7 @@ def _check_auth_from_data(self, parsed_json): return self._check_auth() if 'api-error' in parsed_json: - logger.log('Incorrect authentication credentials: %s' % parsed_json['api-error'], logger.DEBUG) + logger.log('Incorrect authentication credentials: {0}'.format(parsed_json['api-error']), logger.DEBUG) raise AuthException('Your authentication credentials for {0} are missing,' ' check your config.'.format(self.name)) @@ -268,27 +268,27 @@ def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): parsed_json = server.getTorrents(apikey, params or {}, int(results_per_page), int(offset)) time.sleep(cpu_presets[sickbeard.CPU_PRESET]) - except jsonrpclib.jsonrpc.ProtocolError, error: + except jsonrpclib.jsonrpc.ProtocolError as error: if error.message == 'Call Limit Exceeded': logger.log('You have exceeded the limit of 150 calls per hour,' ' per API key which is unique to your user 
account', logger.WARNING) else: - logger.log('JSON-RPC protocol error while accessing provicer. Error: %s ' % repr(error), logger.ERROR) + logger.log('JSON-RPC protocol error while accessing provider. Error: {msg!r} '.format(msg=error), logger.ERROR) parsed_json = {'api-error': ex(error)} return parsed_json except socket.timeout: logger.log('Timeout while accessing provider', logger.WARNING) - except socket.error, error: + except socket.error as error: # Note that sometimes timeouts are thrown as socket errors - logger.log('Socket error while accessing provider. Error: %s ' % error[1], logger.WARNING) + logger.log('Socket error while accessing provider. Error: {msg} '.format(error[1]), logger.WARNING) - except Exception, error: + except Exception as error: errorstring = str(error) if errorstring.startswith('<') and errorstring.endswith('>'): errorstring = errorstring[1:-1] - logger.log('Unknown error while accessing provider. Error: %s ' % errorstring, logger.WARNING) + logger.log('Unknown error while accessing provider. Error: {msg} '.format(msg=errorstring), logger.WARNING) return parsed_json diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index cddf247320..bd0e6834d6 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -169,7 +169,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index 02e0ba2a28..f367a59a1b 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -162,7 +162,8 @@ def _check_auth(self): if self.username and self.api_key: return True - logger.log('Your authentication credentials for %s are missing, check your config.' % self.name, logger.WARNING) + logger.log('Your authentication credentials for {provider} are missing, check your config.'.format + (provider=self.name), logger.WARNING) return False provider = HD4FreeProvider() diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index c9d6d3d704..b44a012375 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -122,7 +122,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " ' minimum seeders: {0}. Seeders: {1}'.format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py index 1174464701..5af0c7f0e4 100644 --- a/sickbeard/providers/limetorrents.py +++ b/sickbeard/providers/limetorrents.py @@ -156,7 +156,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " ' minimum seeders: {0}. 
Seeders: {1}'.format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index 618f408065..c4e8f5f85a 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -163,7 +163,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " " minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 5e54cef129..6d66f0df10 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -120,7 +120,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 1299c771d9..0f0ae55abb 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -130,7 +130,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index aa9e0458b9..1e23c08796 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -142,7 +142,7 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index 4ce84bb94c..712aecb5bf 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -127,7 +127,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index eefdce7eef..3e02992e5c 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -144,7 +144,7 @@ def process_column_header(th): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index 6e71464498..09809ef52c 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -121,7 +121,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index 5b580f3fe3..37b3438424 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -129,7 +129,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index 829ca1660f..450705b96e 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -121,7 +121,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue @@ -136,7 +136,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None, + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index b5ba0be432..ccf47c7801 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -140,7 +140,7 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " " minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py index 0f4ef98ed2..cde2f11b8e 100644 --- a/sickbeard/providers/torrentproject.py +++ b/sickbeard/providers/torrentproject.py @@ -103,7 +103,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/torrentz.py b/sickbeard/providers/torrentz.py index 267d430496..c296e64a20 100644 --- a/sickbeard/providers/torrentz.py +++ b/sickbeard/providers/torrentz.py @@ -102,7 +102,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py index c36fed6bd4..3c676c8253 100644 --- a/sickbeard/providers/transmitthenet.py +++ b/sickbeard/providers/transmitthenet.py @@ -142,7 +142,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " " minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index ab39816ce4..79f7f4570f 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -133,7 +133,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py index 01dd6c41b6..76b7de7f9f 100644 --- a/sickbeard/providers/xthor.py +++ b/sickbeard/providers/xthor.py @@ -152,7 +152,7 @@ def process_column_header(td): # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py index 5a2a4f2ea3..9c71949197 100644 --- a/sickbeard/providers/zooqle.py +++ b/sickbeard/providers/zooqle.py @@ -112,7 +112,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man title = cells[1].find('a').get_text() magnet = cells[2].find('a')['href'] download_url = '{magnet}{trackers}'.format(magnet=magnet, - trackers=self._custom_trackers) + trackers=self._custom_trackers) if not all([title, download_url]): continue @@ -127,7 +127,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Filter unseeded torrent if seeders < min(self.minseed, 1): if mode != 'RSS': - logger.log("Discarding torrent because it doesn't meet the" + logger.log("Discarding torrent because it doesn't meet the " "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue From 3259a5efb161fd31e7aa5b464e13346ec0f048e6 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 18 Jun 2016 16:20:11 +0200 Subject: [PATCH 076/134] Last small changes --- sickbeard/providers/btn.py | 8 ++++---- sickbeard/providers/freshontv.py | 2 +- sickbeard/providers/kat.py | 2 +- sickbeard/providers/limetorrents.py | 2 +- sickbeard/providers/rarbg.py | 2 +- sickbeard/providers/torrentday.py | 2 +- sickbeard/providers/torrentleech.py | 2 +- sickbeard/providers/torrentproject.py | 2 +- sickbeard/providers/transmitthenet.py | 2 +- 9 files changed, 12 insertions(+), 12 deletions(-) diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index 937b9f8cf6..61b958745c 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -119,7 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many (title, url) = self._get_title_and_url(torrent_info) if title and url: - logger.log('Found result: {0} '.format(title), logger.DEBUG) + logger.log('Found result: {0}'.format(title), logger.DEBUG) results.append(torrent_info) # FIXME SORT RESULTS @@ -273,7 +273,7 @@ def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): logger.log('You have exceeded the limit of 150 calls per hour,' ' per API key which is unique to your user account', logger.WARNING) else: - logger.log('JSON-RPC protocol error while accessing provider. Error: {msg!r} '.format(msg=error), logger.ERROR) + logger.log('JSON-RPC protocol error while accessing provider. Error: {msg!r}'.format(msg=error), logger.ERROR) parsed_json = {'api-error': ex(error)} return parsed_json @@ -282,13 +282,13 @@ def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): except socket.error as error: # Note that sometimes timeouts are thrown as socket errors - logger.log('Socket error while accessing provider. Error: {msg} '.format(error[1]), logger.WARNING) + logger.log('Socket error while accessing provider. Error: {msg}'.format(msg=error[1]), logger.WARNING) except Exception as error: errorstring = str(error) if errorstring.startswith('<') and errorstring.endswith('>'): errorstring = errorstring[1:-1] - logger.log('Unknown error while accessing provider. Error: {msg} '.format(msg=errorstring), logger.WARNING) + logger.log('Unknown error while accessing provider. Error: {msg}'.format(msg=errorstring), logger.WARNING) return parsed_json diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index bd0e6834d6..6dd3335295 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -170,7 +170,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index b44a012375..13cbcd0a32 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -123,7 +123,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py index 5af0c7f0e4..97f0b83ace 100644 --- a/sickbeard/providers/limetorrents.py +++ b/sickbeard/providers/limetorrents.py @@ -157,7 +157,7 @@ def parse(self, data, mode): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - ' minimum seeders: {0}. Seeders: {1}'.format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index c4e8f5f85a..7261c49f98 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -164,7 +164,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - " minimum seeders: {0}. Seeders: {1}".format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index 450705b96e..45848cf5ef 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -122,7 +122,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - "minimum seeders: {0}. Seeders: {1}".format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index ccf47c7801..7341eda9dd 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -141,7 +141,7 @@ def process_column_header(td): if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - " minimum seeders: {0}. Seeders: {1}".format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py index cde2f11b8e..67e2f9bb1e 100644 --- a/sickbeard/providers/torrentproject.py +++ b/sickbeard/providers/torrentproject.py @@ -104,7 +104,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - "minimum seeders: {0}. Seeders: {1}".format + "minimum seeders: {0}. Seeders: {1}".format (title, seeders), logger.DEBUG) continue diff --git a/sickbeard/providers/transmitthenet.py b/sickbeard/providers/transmitthenet.py index 3c676c8253..9dc6c5fa5a 100644 --- a/sickbeard/providers/transmitthenet.py +++ b/sickbeard/providers/transmitthenet.py @@ -143,7 +143,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if seeders < min(self.minseed, 1): if mode != 'RSS': logger.log("Discarding torrent because it doesn't meet the " - " minimum seeders: {0}. Seeders: {1}".format + "minimum seeders: {0}. 
Seeders: {1}".format (title, seeders), logger.DEBUG) continue From 97547ac3e9dfa8d20b26b94b3b02d6e5c8d467d4 Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 18 Jun 2016 16:22:27 +0200 Subject: [PATCH 077/134] Change TPB url, update cache to 20 min --- sickbeard/providers/thepiratebay.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index 3e02992e5c..9c4556eeab 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -42,7 +42,7 @@ def __init__(self): self.public = True # URLs - self.url = 'https://thepiratebay.se' + self.url = 'https://thepiratebay.org' self.urls = { 'rss': urljoin(self.url, 'tv/latest'), 'search': urljoin(self.url, 's/'), # Needs trailing / @@ -59,7 +59,7 @@ def __init__(self): self.minleech = None # Cache - self.cache = tvcache.TVCache(self, min_time=1) # only poll ThePirateBay every 30 minutes max + self.cache = tvcache.TVCache(self, min_time=20) # only poll ThePirateBay every 20 minutes max def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches, too-many-statements results = [] From fbffd218ad1c9cbdce3043dcd6b192cb01031b97 Mon Sep 17 00:00:00 2001 From: Labrys of Knossos Date: Sat, 18 Jun 2016 10:28:14 -0400 Subject: [PATCH 078/134] More providers (#698) * Remove redundant character escapes * Use augmented assignment * Fix indentation * Use six.iteritems for py3 compatibility --- sickbeard/providers/alpharatio.py | 4 ++-- sickbeard/providers/btn.py | 9 +++++---- sickbeard/providers/iptorrents.py | 2 +- sickbeard/providers/tntvillage.py | 2 +- sickbeard/providers/torrentday.py | 2 +- sickbeard/providers/torrentproject.py | 2 +- 6 files changed, 11 insertions(+), 10 deletions(-) diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index 0f5934c380..4ea71a35ec 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -190,8 +190,8 @@ def login(self): logger.log('Unable to connect to provider', logger.WARNING) return False - if re.search('Invalid Username/password', response) \ - or re.search('Login :: AlphaRatio.cc', response): + if any([re.search('Invalid Username/password', response), + re.search('Login :: AlphaRatio.cc', response)]): logger.log('Invalid username or password. Check your settings', logger.WARNING) return False diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index 61b958745c..73d3caa412 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -22,8 +22,9 @@ import math import socket import time -import sickbeard +from six import iteritems +import sickbeard from datetime import datetime from sickbeard import classes, logger, scene_exceptions, tvcache @@ -110,12 +111,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many # +1 because range(1,4) = 1, 2, 3 for page in range(1, pages_needed + 1): parsed_json = self._api_call(self.apikey, search_params, results_per_page, page * results_per_page) - # Note that this these are individual requests and might time out individually. This would result in 'gaps' - # in the results. There is no way to fix this though. + # Note that these are individual requests and might time out individually. + # This would result in 'gaps' in the results. There is no way to fix this though. 
if 'torrents' in parsed_json: found_torrents.update(parsed_json['torrents']) - for _, torrent_info in found_torrents.iteritems(): + for _, torrent_info in iteritems(found_torrents): (title, url) = self._get_title_and_url(torrent_info) if title and url: diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index 788339e9ac..e578078cf9 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -90,7 +90,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log('No data returned from provider', logger.DEBUG) continue - data = re.sub(r'(?im)', '', data, 0) + data = re.sub(r'(?im)', '', data, 0) with BS4Parser(data, 'html5lib') as html: torrent_table = html.find('table', attrs={'class': 'torrents'}) torrents = torrent_table('tr') if torrent_table else [] diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 773d9cbaaa..6b5264be72 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -255,7 +255,7 @@ def _has_only_subs(title): counter = 0 for word in title: if 'ita' in word: - counter = counter + 1 + counter += 1 if counter < 2: return True diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index 45848cf5ef..c9e5825b60 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -110,7 +110,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for torrent in torrents: try: - title = re.sub(r'\[.*\=.*\].*\[/.*\]', '', torrent['name']) if torrent['name'] else None + title = re.sub(r'\[.*=.*\].*\[/.*\]', '', torrent['name']) if torrent['name'] else None download_url = urljoin(self.urls['download'], '{}/{}'.format(torrent['id'], torrent['fname'])) if torrent['id'] and torrent['fname'] else None if not all([title, download_url]): continue diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py index 67e2f9bb1e..051a7b5d01 100644 --- a/sickbeard/providers/torrentproject.py +++ b/sickbeard/providers/torrentproject.py @@ -119,7 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None, + 'hash': None, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} leechers'.format From 40c958115dc947238a7bc3d186bc53b9a2cceb1e Mon Sep 17 00:00:00 2001 From: medariox Date: Sat, 18 Jun 2016 16:47:03 +0200 Subject: [PATCH 079/134] Store hash for torrentproject --- sickbeard/providers/torrentproject.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py index 051a7b5d01..8426e64c02 100644 --- a/sickbeard/providers/torrentproject.py +++ b/sickbeard/providers/torrentproject.py @@ -108,9 +108,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man (title, seeders), logger.DEBUG) continue - torrent_hash = torrents[result].get('torrent_hash') torrent_size = torrents[result].get('torrent_size') size = convert_size(torrent_size) or -1 + torrent_hash = torrents[result].get('torrent_hash') item = { 'title': title, @@ -119,7 +119,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man 'seeders': seeders, 'leechers': leechers, 'pubdate': None, - 'hash': None, + 'hash': torrent_hash, } if mode != 'RSS': logger.log('Found result: {0} with {1} seeders and {2} 
leechers'.format From ee2a07628f5017efc8dad4980e315bd20d10c891 Mon Sep 17 00:00:00 2001 From: p0ps Date: Tue, 21 Jun 2016 14:11:44 +0200 Subject: [PATCH 080/134] Tried fixing some seeders being marked red (not meeting the minimum required seeders) when they shouldn't. (#672) --- gui/slick/views/snatchSelection.mako | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/gui/slick/views/snatchSelection.mako b/gui/slick/views/snatchSelection.mako index 9cb4a0b5dc..55de85ac1d 100644 --- a/gui/slick/views/snatchSelection.mako +++ b/gui/slick/views/snatchSelection.mako @@ -317,9 +317,9 @@ if hItem["name"] and preferred_words and show_name_helpers.containsAtLeastOneWord(hItem["name"], preferred_words): name_preferred = True - if hItem["provider_minseed"] and hItem["seeders"] and hItem["seeders"] > -1 and int(hItem["seeders"]) < hItem["provider_minseed"]: + if hItem["provider_minseed"] and int(hItem["seeders"]) > -1 and int(hItem["seeders"]) < int(hItem["provider_minseed"]): below_minseed = True - if hItem["provider_minleech"] and hItem["leechers"] and hItem["leechers"] > -1 and int(hItem["leechers"]) < hItem["provider_minleech"]: + if hItem["provider_minleech"] and int(hItem["leechers"]) > -1 and int(hItem["leechers"]) < int(hItem["provider_minleech"]): below_minleech = True %> @@ -360,6 +360,7 @@ % endif + % if below_minseed: % else: From 4cffaf46648275ecb7c995bc5c1ff4a98b9d54f8 Mon Sep 17 00:00:00 2001 From: Dario Date: Tue, 21 Jun 2016 14:39:40 +0200 Subject: [PATCH 081/134] Fix HDSpace and SceneTime, improve Zooqle (#701) * Fix HDSPace (needs rewrite) * Add empty result log * Add try_int and urljoin * Fix SceneTime * Improve Zooqle * Use search params for SceneTime, add cache time (20 min), use response * Use search params for HDSpace, use response --- sickbeard/providers/hdspace.py | 51 ++++++++++++++++---------------- sickbeard/providers/scenetime.py | 35 ++++++++++++++-------- sickbeard/providers/zooqle.py | 11 +++---- 3 files changed, 53 insertions(+), 44 deletions(-) diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index da7b6757b1..7cdbc96b4d 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -23,8 +23,8 @@ import re import traceback +from requests.compat import urljoin from requests.utils import dict_from_cookiejar -from requests.compat import quote_plus from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -47,21 +47,13 @@ def __init__(self): # URLs self.url = 'https://hd-space.org' self.urls = { - 'base_url': self.url, - 'login': 'https://hd-space.org/index.php', - 'search': 'https://hd-space.org/index.php?page=torrents&search=%s&active=1&options=0', - 'rss': 'https://hd-space.org/rss_torrents.php?feed=dl', + 'login': urljoin(self.url, 'index.php?page=login'), + 'search': urljoin(self.url, 'index.php'), } # Proper Strings # Miscellaneous Options - self.categories = [15, 21, 22, 24, 25, 40] # HDTV/DOC 1080/720, bluray, remux - self.urls['search'] += '&category=' - for cat in self.categories: - self.urls['search'] += str(cat) + '%%3B' - self.urls['rss'] += '&cat[]=' + str(cat) - self.urls['search'] = self.urls['search'][:-4] # remove extra %%3B # Torrent Stats self.minseed = None @@ -83,6 +75,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if not self.login(): return results + # Search Params + search_params = { + 'page': 'torrents', + 'search': '', + 'active': 0, + 'options': 0, + 'category': '15;40;21;22;24;25;27;28', + } + for mode in 
search_strings: items = [] logger.log('Search mode: {0}'.format(mode), logger.DEBUG) @@ -92,12 +93,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if mode != 'RSS': logger.log('Search string: {search}'.format (search=search_string), logger.DEBUG) - search_url = self.urls['search'] % (quote_plus(search_string.replace('.', ' ')),) - else: - search_url = self.urls['search'] % '' + search_params['search'] = search_string - data = self.get_url(search_url, returns='text') - if not data or 'please try later' in data: + response = self.get_url(self.urls['search'], params=search_params, returns='response') + if not response or not response.text or 'please try later' in response.text: logger.log('No data returned from provider', logger.DEBUG) continue @@ -105,36 +104,36 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # We cut everything before the table that contains the data we are interested in thus eliminating # the invalid html portions try: - data = data.split('
')[1] - index = data.index(' 6: + peers = cells[6].find('div') + if peers and peers.get('title'): + peers = peers['title'].replace(',', '').split(' | ', 1) + seeders = try_int(peers[0].strip('Seeders: ')) + leechers = try_int(peers[1].strip('Leechers: ')) # Filter unseeded torrent if seeders < min(self.minseed, 1): From 657edb4c491637363f299edd5d1b52c8c2ba8d7e Mon Sep 17 00:00:00 2001 From: p0ps Date: Tue, 21 Jun 2016 14:50:03 +0200 Subject: [PATCH 082/134] Added AnimeBytes support (Only seasonpack) (#674) Added provider AnimeBytes --- gui/slick/images/providers/animebytes.png | Bin 0 -> 619 bytes sickbeard/name_parser/regexes.py | 10 + sickbeard/providers/__init__.py | 4 +- sickbeard/providers/animebytes.py | 354 ++++++++++++++++++++++ sickrage/providers/GenericProvider.py | 6 +- 5 files changed, 370 insertions(+), 4 deletions(-) create mode 100644 gui/slick/images/providers/animebytes.png create mode 100644 sickbeard/providers/animebytes.py diff --git a/gui/slick/images/providers/animebytes.png b/gui/slick/images/providers/animebytes.png new file mode 100644 index 0000000000000000000000000000000000000000..61997c2af562f69788f24ef02403a85ca07fcc7a GIT binary patch literal 619 zcmV-x0+juUP)X};R=fo8w(Ho#r@Ac_xwMc%ReG$F2)MO%drDn z(D^6dz#Lv-{9NVPRL~?Y;3!UF2;m~8HN3-FTy1XZ#limKveNOrhk{&oF}?8O{ToA{ zW~Ptgai)n_kCDFi?jTo3k}i6-_OA^ZUoB-C&u_yh2DbICrILF3{;hhHn4g_<`Qa^R zZeDi#$xvkjAu~7=wM4vkckqRD4|@{ z7NcBMk7DrT`=s+9?oqB9%Jx7J10e>nVt1~eC~wTXDjV7|w(j*_MF4lmhgzFxH^ zUnEM@)L{W>h#WGl)j+E1RXhdr((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # E02 (v(?P[0-9]))? # v2 .*? # Separator and EOL + """), + ('anime_season_only', + # Show.Name.S01.Source.Quality.Etc-Group + r""" + ^((?P.+?)[. _-]+)? # Show_Name and separator + s(eason[. _-])? # S01/Season 01 + (?P\d+)[. _-]* # S01 and optional separator + [. _-]*((?P.+?) # Source_Quality_Etc- + ((?[^ -]+([. _-]\[.*\])?))?)?$ # Group """) ] diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 62c1dfeff6..4f9fbfc260 100644 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -25,7 +25,7 @@ omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, xthor, abnormal, torrentbytes, cpasbien,\ freshontv, morethantv, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \ scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free, limetorrents, \ - norbits, ilovetorrents, anizb, bithdtv, zooqle + norbits, ilovetorrents, anizb, bithdtv, zooqle, animebytes __all__ = [ 'womble', 'btn', 'thepiratebay', 'kat', 'torrentleech', 'scc', 'hdtorrents', @@ -36,7 +36,7 @@ 'xthor', 'abnormal', 'scenetime', 'btdigg', 'transmitthenet', 'tvchaosuk', 'torrentproject', 'extratorrent', 'bitcannon', 'torrentz', 'pretome', 'gftracker', 'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free', 'limetorrents', - 'norbits', 'ilovetorrents', 'anizb', 'bithdtv', 'zooqle' + 'norbits', 'ilovetorrents', 'anizb', 'bithdtv', 'zooqle', 'animebytes' ] diff --git a/sickbeard/providers/animebytes.py b/sickbeard/providers/animebytes.py new file mode 100644 index 0000000000..10a59400f5 --- /dev/null +++ b/sickbeard/providers/animebytes.py @@ -0,0 +1,354 @@ +# coding=utf-8 +# Author: p0ps +# +# This file is part of Medusa. 
+# +# Medusa is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Medusa is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Medusa. If not, see . + +from __future__ import unicode_literals + +import re +import traceback + +from urlparse import parse_qs + +from requests.compat import urljoin +from requests.utils import dict_from_cookiejar + +from sickbeard import logger, tvcache +from sickbeard.bs4_parser import BS4Parser + +from sickrage.helper.common import convert_size +from sickrage.providers.torrent.TorrentProvider import TorrentProvider +from sickbeard.show_name_helpers import allPossibleShowNames + + +SEASON_PACK = 1 +SINGLE_EP = 2 +MULTI_EP = 3 +MULTI_SEASON = 4 +COMPLETE = 5 +OTHER = 6 + + +class AnimeBytes(TorrentProvider): # pylint: disable=too-many-instance-attributes + """AnimeBytes Torrent provider""" + def __init__(self): + + # Provider Init + TorrentProvider.__init__(self, 'AnimeBytes') + + # Credentials + self.username = None + self.password = None + + # URLs + self.url = 'https://animebytes.tv/' + self.urls = { + 'login': urljoin(self.url, '/user/login'), + 'search': urljoin(self.url, 'torrents.php'), + 'download': urljoin(self.url, '/torrent/{torrent_id}/download/{passkey}'), + } + + # Proper Strings + self.proper_strings = [] + + # Miscellaneous Options + + # Torrent Stats + self.minseed = None + self.minleech = None + + # Cache + self.cache = tvcache.TVCache(self, min_time=30) + + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-branches + """ + AnimeBytes search and parsing + :param search_string: A dict with mode (key) and the search value (value) + :param age: Not used + :param ep_obj: Not used + :returns: A list of search results (structure) + """ + _ = age + _ = ep_obj + results = [] + + if self.show and not self.show.is_anime: + return results + + if not self.login(): + return results + + episode = None + season = None + + # Search Params + search_params = { + 'filter_cat[1]': '1', + 'filter_cat[5]': '1', + 'action': 'advanced', + 'search_type': 'title', + 'year': '', + 'year2': '', + 'tags': '', + 'tags_type': '0', + 'sort': 'time_added', + 'way': 'desc', + 'hentai': '2', + 'anime[tv_series]': '1', + 'anime[tv_special]': '1', + 'releasegroup': '', + 'epcount': '', + 'epcount2': '', + 'artbooktitle': '', + } + + for mode in search_strings: + items = [] + logger.log('Search Mode: {0}'.format(mode), logger.DEBUG) + + for search_string in search_strings[mode]: + + if mode != 'RSS': + logger.log('Search string: {0}'.format(search_string), + logger.DEBUG) + search_params['searchstr'] = search_string + + data = self.get_url(self.urls['search'], params=search_params, returns='text') + if not data: + logger.log('No data returned from provider', logger.DEBUG) + continue + + with BS4Parser(data, 'html5lib') as html: + torrent_div = html.find('div', class_='thin') + torrent_group = torrent_div.find_all('div', class_='group_cont box anime') + + if not torrent_group: + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue + + for group in 
torrent_group: + torrent_main = group.find_all('div', class_='group_main') + + for row in torrent_main: + try: + show_name = row.find('span', class_='group_title').find_next('a').get_text() + show_table = row.find('table', class_='torrent_group') + show_info = show_table.find_all('td') + + # A type of release used to determine how to parse the release + # For example a SINGLE_EP should be parsed like: show_name.episode.12.[source].[codec].[release_group] + # A multi ep release shoiuld look like: show_name.episode.1-12.[source].. + release_type = OTHER + + rows_to_skip = 0 + + for index, info in enumerate(show_info): + + if rows_to_skip: + rows_to_skip = rows_to_skip - 1 + continue + + info = info.get_text(strip=True) + + if show_name and info.startswith('[DL]'): + # Set skip next 4 rows, as they are useless + rows_to_skip = 4 + + hrefs = show_info[index].find_all('a') + params = parse_qs(hrefs[0].get('href', '')) + properties_string = hrefs[1].get_text().rstrip(' |').replace('|', '.').replace(' ', '') + properties_string = properties_string.replace('h26410-bit', 'h264.hi10p') # Hack for the h264 10bit stuff + properties = properties_string.split('.') + download_url = self.urls['download'].format(torrent_id=params['id'][0], + passkey=params['torrent_pass'][0]) + if not all([params, properties]): + continue + + tags = '{torrent_source}.{torrent_container}.{torrent_codec}.{torrent_res}.' \ + '{torrent_audio}'.format(torrent_source=properties[0], + torrent_container=properties[1], + torrent_codec=properties[2], + torrent_res=properties[3], + torrent_audio=properties[4]) + + last_field = re.match(r'(.*)\((.*)\)', properties[-1]) + # subs = last_field.group(1) if last_field else '' # We're not doing anything with this for now + release_group = '-{0}'.format(last_field.group(2)) if last_field else '' + + # Construct title based on the release type + + if release_type == SINGLE_EP: + # Create the single episode release_name + # Single.Episode.TV.Show.SXXEXX[Episode.Part].[Episode.Title].TAGS.[LANGUAGE].720p.FORMAT.x264-GROUP + title = '{title}.{season}{episode}.{tags}' \ + '{release_group}'.format(title=show_name, + season='S{0}'.format(season) if season else 'S01', + episode='E{0}'.format(episode), + tags=tags, + release_group=release_group) + if release_type == MULTI_EP: + # Create the multi-episode release_name + # Multiple.Episode.TV.Show.SXXEXX-EXX[Episode.Part].[Episode.Title].TAGS.[LANGUAGE].720p.FORMAT.x264-GROUP + title = '{title}.{season}{multi_episode}.{tags}' \ + '{release_group}'.format(title=show_name, + season='S{0}'.format(season) if season else 'S01', + multi_episode='E01-E{0}'.format(episode), + tags=tags, + release_group=release_group) + if release_type == SEASON_PACK: + # Create the season pack release_name + title = '{title}.{season}.{tags}' \ + '{release_group}'.format(title=show_name, + season='S{0}'.format(season) if season else 'S01', + tags=tags, + release_group=release_group) + + if release_type == MULTI_SEASON: + # Create the multi season pack release_name + # Multiple.Episode.TV.Show.EXX-EXX[Episode.Part].[Episode.Title].TAGS.[LANGUAGE].720p.FORMAT.x264-GROUP + title = '{title}.{episode}.{tags}' \ + '{release_group}'.format(title=show_name, + episode=episode, + tags=tags, + release_group=release_group) + + seeders = show_info[index + 3].get_text() + leechers = show_info[index + 4].get_text() + + # Filter unseeded torrent + if seeders < min(self.minseed, 1): + if mode != 'RSS': + logger.log("Discarding torrent because it doesn't meet the" + ' minimum seeders: {0}. 
Seeders: {1}'.format + (title, seeders), logger.DEBUG) + continue + + torrent_size = show_info[index + 1].get_text() + size = convert_size(torrent_size) or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None + } + if mode != 'RSS': + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) + + # Determine episode, season and type + if info.startswith('Episode'): + # show_name = '{0}.{1}'.format(show_title, info) + episode = re.match('^Episode.([0-9]+)', info).group(1) + release_type = SINGLE_EP + elif info.startswith('Season'): + # Test for MultiSeason pack + if re.match('Season.[0-9]+-[0-9]+.\([0-9-]+\)', info): + # We can read the season AND the episodes, but we can only process multiep. + # So i've chosen to use it like 12-23 or 1-12. + match = re.match('Season.([0-9]+)-([0-9]+).\(([0-9-]+)\)', info) + episode = match.group(3).upper() + season = '{0}-{1}'.format(match.group(1), match.group(2)) + release_type = MULTI_SEASON + else: + season = re.match('Season.([0-9]+)', info).group(1) + # show_name = '{0}.{1}'.format(show_title, info) + release_type = SEASON_PACK + elif re.match('([0-9]+).episodes.*', info): + # This is a season pack, but, let's use it as a multi ep for now + # 13 episodes -> SXXEXX-EXX + episode = re.match('^([0-9]+).episodes.*', info).group(1) + release_type = MULTI_EP + else: + # Row is useless, skip it (eg. only animation studio) + continue + + except (AttributeError, TypeError, KeyError, ValueError, IndexError): + logger.log('Failed parsing provider. Traceback: {0!r}'.format + (traceback.format_exc()), logger.ERROR) + continue + + results += items + + return results + + def login(self): + """Login to AnimeBytes, check of the session cookie""" + if (any(dict_from_cookiejar(self.session.cookies).values()) and + dict_from_cookiejar(self.session.cookies).get('session')): + return True + + # Get csrf_token + data = self.get_url(self.urls['login'], returns='text') + with BS4Parser(data, 'html5lib') as html: + csrf_token = html.find('input', {'name': 'csrf_token'}).get('value') + + if not csrf_token: + logger.log("Unable to get csrf_token, can't login", logger.WARNING) + return False + + login_params = { + 'username': self.username, + 'password': self.password, + 'csrf_token': csrf_token, + 'login': 'Log In!', + 'keeplogged_sent': 'true', + } + + response = self.get_url(self.urls['login'], post_data=login_params, returns='text') + if not response: + logger.log('Unable to connect to provider', logger.WARNING) + return False + + if re.search('Login incorrect. Only perfect spellers may enter this system!', response): + logger.log('Invalid username or password. 
Check your settings', logger.WARNING) + self.session.cookies.clear() + return False + + return True + + def _get_episode_search_strings(self, episode, add_string=''): + """Method override because AnimeBytes doesn't support searching showname + episode number""" + if not episode: + return [] + + search_string = { + 'Episode': [] + } + + for show_name in allPossibleShowNames(episode.show, season=episode.scene_season): + search_string['Episode'].append(show_name.strip()) + + return [search_string] + + def _get_season_search_strings(self, episode): + """Method override because AnimeBytes doesn't support searching showname + season number""" + search_string = { + 'Season': [] + } + + for show_name in allPossibleShowNames(episode.show, season=episode.scene_season): + search_string['Season'].append(show_name.strip()) + + return [search_string] + + +provider = AnimeBytes() diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index 359f17694b..e1fad5b848 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -188,7 +188,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, try: parse_result = NameParser(parse_method=('normal', 'anime')[show.is_anime]).parse(title) except (InvalidNameException, InvalidShowException) as error: - logger.log(u"{}".format(error), logger.DEBUG) + logger.log(u"{error}".format(error=error), logger.DEBUG) continue show_object = parse_result.show @@ -219,7 +219,9 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, parse_result.season_number is not None, parse_result.episode_numbers, [ep for ep in episodes if (ep.season, ep.scene_season)[ep.show.is_scene] == - parse_result.season_number and (ep.episode, ep.scene_episode)[ep.show.is_scene] in parse_result.episode_numbers] + parse_result.season_number and + (ep.episode, ep.scene_episode)[ep.show.is_scene] in + parse_result.episode_numbers] ]): logger.log( From a1b8e6f1aa9ab62a1437aa01ac5980c071df3e41 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 17 Jun 2016 00:29:03 +0200 Subject: [PATCH 083/134] Fixed bug in forced Search: _addCacheEntry parameter mismatch called from GenericProvider.py * Cleaned up tvcache.py (format, single quotes) * Enabled import unicode_literals for tvcache.py --- sickbeard/tvcache.py | 244 ++++++++++++-------------- sickrage/providers/GenericProvider.py | 2 +- 2 files changed, 116 insertions(+), 130 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index adcfddd940..2f2593b51d 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -17,6 +17,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see .
+from __future__ import unicode_literals + import time import datetime import itertools @@ -32,68 +34,70 @@ class CacheDBConnection(db.DBConnection): - def __init__(self, provider_id): + def __init__(self, providerName): db.DBConnection.__init__(self, 'cache.db') # Create the table if it's not already there try: - if not self.hasTable(provider_id): - logger.log(u'Creating cache table for provider {0}'.format(provider_id), logger.DEBUG) + if not self.hasTable(providerName): + logger.log('Creating cache table for provider {0}'.format(providerName), logger.DEBUG) self.action( - 'CREATE TABLE [{0}] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC,' - 'url TEXT, time NUMERIC, quality NUMERIC, release_group TEXT)'.format(provider_id)) + b'CREATE TABLE [{provider_name}] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, ' + b'url TEXT, time NUMERIC, quality NUMERIC, release_group TEXT)'.format(provider_name=providerName)) else: - sql_results = self.select('SELECT url, COUNT(url) AS count FROM [{0}] ' - 'GROUP BY url HAVING count > 1'.format(provider_id)) + sql_results = self.select(b'SELECT url, COUNT(url) AS count FROM [{provider_name}] ' + b'GROUP BY url HAVING count > 1'.format(provider_name=providerName)) for cur_dupe in sql_results: - self.action('DELETE FROM [{0}] WHERE url = ?'.format(provider_id), [cur_dupe['url']]) + self.action(b'DELETE FROM [{provider_name}] WHERE url = ?'.format(provider_name=providerName), [cur_dupe[b'url']]) # remove wrong old index - self.action('DROP INDEX IF EXISTS idx_url') + self.action(b'DROP INDEX IF EXISTS idx_url') # add unique index to prevent further dupes from happening if one does not exist - logger.log(u'Creating UNIQUE URL index for {0}'.format(provider_id), logger.DEBUG) - self.action('CREATE UNIQUE INDEX IF NOT EXISTS idx_url_{0} ON [{1}] (url)'.format(provider_id, provider_id)) + logger.log(b'Creating UNIQUE URL index for {0}'.format(providerName), logger.DEBUG) + self.action(b'CREATE UNIQUE INDEX IF NOT EXISTS idx_url_{0} ON [{1}] (url)'. 
+ format(providerName, providerName)) # add release_group column to table if missing - if not self.hasColumn(provider_id, 'release_group'): - self.addColumn(provider_id, 'release_group', 'TEXT', '') + if not self.hasColumn(providerName, 'release_group'): + self.addColumn(providerName, 'release_group', 'TEXT', '') # add version column to table if missing - if not self.hasColumn(provider_id, 'version'): - self.addColumn(provider_id, 'version', 'NUMERIC', '-1') + if not self.hasColumn(providerName, 'version'): + self.addColumn(providerName, 'version', 'NUMERIC', '-1') # add seeders column to table if missing - if not self.hasColumn(provider_id, 'seeders'): - self.addColumn(provider_id, 'seeders', 'NUMERIC', '-1') + if not self.hasColumn(providerName, 'seeders'): + self.addColumn(providerName, 'seeders', 'NUMERIC', '-1') # add leechers column to table if missing - if not self.hasColumn(provider_id, 'leechers'): - self.addColumn(provider_id, 'leechers', 'NUMERIC', '-1') + if not self.hasColumn(providerName, 'leechers'): + self.addColumn(providerName, 'leechers', 'NUMERIC', '-1') # add size column to table if missing - if not self.hasColumn(provider_id, 'size'): - self.addColumn(provider_id, 'size', 'NUMERIC', '-1') + if not self.hasColumn(providerName, 'size'): + self.addColumn(providerName, 'size', 'NUMERIC', '-1') # add pubdate column to table if missing - if not self.hasColumn(provider_id, 'pubdate'): - self.addColumn(provider_id, 'pubdate', 'NUMERIC', '') + if not self.hasColumn(providerName, 'pubdate'): + self.addColumn(providerName, 'pubdate', 'NUMERIC', '') # add hash column to table if missing - if not self.hasColumn(provider_id, 'hash'): - self.addColumn(provider_id, 'hash', 'NUMERIC', '') + if not self.hasColumn(providerName, 'hash'): + self.addColumn(providerName, 'hash', 'NUMERIC', '') except Exception as e: - if str(e) != 'table [{0}] already exists'.format(provider_id): + if str(e) != 'table [{provider_name}] already exists'.format(provider_name=providerName): raise # Create the table if it's not already there try: if not self.hasTable('lastUpdate'): - self.action('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)') + self.action(b'CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)') except Exception as e: - logger.log(u'Error while searching {0}, skipping: {1!r}'.format(self.provider.name, e), logger.DEBUG) + logger.log('Error while searching {provider_name}, skipping: {e!r}'. 
+ format(provider_name=self.provider.name, e=e), logger.DEBUG) logger.log(traceback.format_exc(), logger.DEBUG) if str(e) != 'table lastUpdate already exists': raise @@ -102,17 +106,17 @@ def __init__(self, provider_id): class TVCache(object): def __init__(self, provider, **kwargs): self.provider = provider - self.provider_id = self.provider.get_id() - self.provider_db = None - self.minTime = kwargs.pop(u'min_time', 10) - self.search_params = kwargs.pop(u'search_params', dict(RSS=[''])) + self.providerID = self.provider.get_id() + self.providerDB = None + self.minTime = kwargs.pop('min_time', 10) + self.search_params = kwargs.pop('search_params', dict(RSS=[''])) - def _get_db(self): + def _getDB(self): # init provider database if not done already - if not self.provider_db: - self.provider_db = CacheDBConnection(self.provider_id) + if not self.providerDB: + self.providerDB = CacheDBConnection(self.providerID) - return self.provider_db + return self.providerDB def _clearCache(self): """ @@ -132,9 +136,9 @@ def trim_cache(self, days=None): if days: now = int(time.time()) # current timestamp retention_period = now - (days * 86400) - logger.log(u'Removing cache entries older than {x} days from {provider}'.format + logger.log('Removing cache entries older than {x} days from {provider}'.format (x=days, provider=self.providerID)) - cache_db_con = self._get_db() + cache_db_con = self._getDB() cache_db_con.action( b'DELETE FROM [{provider}] ' b'WHERE time < ? '.format(provider=self.providerID), @@ -165,7 +169,7 @@ def _get_hash(self, item): return self.provider._get_hash(item) def _getRSSData(self): - return {u'entries': self.provider.search(self.search_params)} if self.search_params else None + return {'entries': self.provider.search(self.search_params)} if self.search_params else None def _checkAuth(self, data): # pylint:disable=unused-argument, no-self-use return True @@ -187,58 +191,39 @@ def updateCache(self): # set updated self.setLastUpdate() - # get last 5 rss cache results - recent_results = self.provider.recent_results - found_recent_results = 0 # A counter that keeps track of the number of items that have been found in cache - cl = [] - index = 0 - for index, item in enumerate(data['entries'] or []): - if item['link'] in {cache_item['link'] for cache_item in recent_results}: - found_recent_results += 1 - - if found_recent_results >= self.provider.stop_at: - logger.log(u'Hit the old cached items, not parsing any more for: {0}'.format - (self.provider_id), logger.DEBUG) - break - try: - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - except UnicodeDecodeError as e: - logger.log(u'Unicode decoding error, missed parsing item from provider {0}: {1!r}'.format - (self.provider.name, e), logger.WARNING) - - cache_db_con = self._get_db() + for item in data['entries'] or []: + ci = self._parseItem(item) + if ci is not None: + cl.append(ci) + + cache_db_con = self._getDB() if cl: cache_db_con.mass_action(cl) - # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 - # (overwritable per provider, throug hthe max_recent_items attribute. 
- self.provider.recent_results = data['entries'][0:min(index, self.provider.max_recent_items)] - except AuthException as e: - logger.log(u'Authentication error: {0!r}'.format(e), logger.ERROR) + logger.log('Authentication error: {0!r}'.format(ex(e)), logger.ERROR) except Exception as e: - logger.log(u'Error while searching {0}, skipping: {1!r}'.format(self.provider.name, e), logger.DEBUG) + logger.log('Error while searching {0}, skipping: {1!r}'.format(self.provider.name, e), logger.DEBUG) - def update_cache_manual_search(self, manual_data=None): + def update_cache_manual_search(self, manual_data=None, episode_obj=None): try: cl = [] for item in manual_data: - logger.log(u'Adding to cache item found in manual search: {0}'.format(item.name), logger.DEBUG) + logger.log('Adding to cache item found in manual search: {0}'.format(item.name), logger.DEBUG) ci = self._addCacheEntry(item.name, item.url, item.seeders, item.leechers, item.size, item.pubdate, item.hash) if ci is not None: cl.append(ci) except Exception as e: - logger.log(u'Error while adding to cache item found in manual seach for provider {0}, skipping: {1!r}'.format - (self.provider.name, e), logger.WARNING) + logger.log('Error while adding to cache item found in manual seach for provider {0},' + ' skipping: {1!r}'.format(self.provider.name, e), logger.WARNING) results = [] - cache_db_con = self._get_db() + cache_db_con = self._getDB() if cl: - logger.log(u'Mass updating cache table with manual results for provider: {0}'.format(self.provider.name), logger.DEBUG) + logger.log('Mass updating cache table with manual results for provider: {0}'. + format(self.provider.name), logger.DEBUG) results = cache_db_con.mass_action(cl) return any(results) @@ -250,7 +235,7 @@ def getRSSFeed(self, url, params=None): @staticmethod def _translateTitle(title): - return u'{0}'.format(title.replace(' ', '.')) + return '{0}'.format(title.replace(' ', '.')) @staticmethod def _translateLinkURL(url): @@ -261,6 +246,7 @@ def _parseItem(self, item): seeders, leechers = self._get_result_info(item) size = self._get_size(item) pubdate = self._get_pubdate(item) + torrent_hash = self._get_hash(item) self._checkItemAuth(title, url) @@ -268,21 +254,22 @@ def _parseItem(self, item): title = self._translateTitle(title) url = self._translateLinkURL(url) - # Placed the self._get_hash(item) inline, because hash is a buildin. Could cause issues. 
- return self._addCacheEntry(title, url, seeders, leechers, size, pubdate, self._get_hash(item)) + # logger.log('Attempting to add item to cache: ' + title, logger.DEBUG) + return self._addCacheEntry(title, url, seeders, leechers, size, pubdate, torrent_hash) else: - logger.log(u'The data returned from the {0} feed is incomplete, this result is unusable'.format - (self.provider.name), logger.DEBUG) + logger.log( + 'The data returned from the {0} feed is incomplete, this result is unusable'.format(self.provider.name), + logger.DEBUG) return False def _getLastUpdate(self): - cache_db_con = self._get_db() - sql_results = cache_db_con.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.provider_id]) + cache_db_con = self._getDB() + sql_results = cache_db_con.select(b'SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID]) if sql_results: - lastTime = int(sql_results[0]['time']) + lastTime = int(sql_results[0][b'time']) if lastTime > int(time.mktime(datetime.datetime.today().timetuple())): lastTime = 0 else: @@ -291,11 +278,11 @@ def _getLastUpdate(self): return datetime.datetime.fromtimestamp(lastTime) def _getLastSearch(self): - cache_db_con = self._get_db() - sql_results = cache_db_con.select('SELECT time FROM lastSearch WHERE provider = ?', [self.provider_id]) + cache_db_con = self._getDB() + sql_results = cache_db_con.select(b'SELECT time FROM lastSearch WHERE provider = ?', [self.providerID]) if sql_results: - lastTime = int(sql_results[0]['time']) + lastTime = int(sql_results[0][b'time']) if lastTime > int(time.mktime(datetime.datetime.today().timetuple())): lastTime = 0 else: @@ -307,22 +294,22 @@ def setLastUpdate(self, toDate=None): if not toDate: toDate = datetime.datetime.today() - cache_db_con = self._get_db() + cache_db_con = self._getDB() cache_db_con.upsert( - 'lastUpdate', - {'time': int(time.mktime(toDate.timetuple()))}, - {'provider': self.provider_id} + b'lastUpdate', + {b'time': int(time.mktime(toDate.timetuple()))}, + {b'provider': self.providerID} ) def setLastSearch(self, toDate=None): if not toDate: toDate = datetime.datetime.today() - cache_db_con = self._get_db() + cache_db_con = self._getDB() cache_db_con.upsert( - 'lastSearch', - {'time': int(time.mktime(toDate.timetuple()))}, - {'provider': self.provider_id} + b'lastSearch', + {b'time': int(time.mktime(toDate.timetuple()))}, + {b'provider': self.providerID} ) lastUpdate = property(_getLastUpdate) @@ -331,8 +318,8 @@ def setLastSearch(self, toDate=None): def shouldUpdate(self): # if we've updated recently then skip the update if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime): - logger.log(u'Last update was too soon, using old cache: {0}. Updated less then {1} minutes ago.'.format - (self.lastUpdate, self.minTime), logger.DEBUG) + logger.log('Last update was too soon, using old cache: {0}. 
' + 'Updated less then {1} minutes ago'.format(self.lastUpdate, self.minTime), logger.DEBUG) return False return True @@ -344,12 +331,12 @@ def shouldClearCache(self): return False - def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, torrent_hash, parse_result=None, indexer_id=0): + def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, torrent_hash): try: parse_result = NameParser().parse(name) except (InvalidNameException, InvalidShowException) as error: - logger.log(u'{0}'.format(error), logger.DEBUG) + logger.log('{0}'.format(error), logger.DEBUG) return None if not parse_result or not parse_result.series_name: @@ -361,7 +348,7 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, torrent_ha if season is not None and episodes is not None: # store episodes as a seperated string - episodeText = '|{0}|'.format('|'.join({str(episode) for episode in episodes if episode})) + episodeText = b'|{0}|'.format(b'|'.join({str(episode) for episode in episodes if episode})) # get the current timestamp curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) @@ -377,11 +364,11 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, torrent_ha # get version version = parse_result.version - logger.log(u'Added RSS item: [{0}] to cache: [{1}]'.format(name, self.provider_id), logger.DEBUG) + logger.log('Added RSS item: [{0}] to cache: [{1}]'.format(name, self.providerID), logger.DEBUG) return [ - 'INSERT OR REPLACE INTO [{0}] (name, season, episodes, indexerid, url, time, quality, release_group, ' - 'version, seeders, leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(self.provider_id), + b'INSERT OR REPLACE INTO [{provider_id}] (name, season, episodes, indexerid, url, time, quality, release_group, version, seeders, ' + b'leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(provider_id=self.providerID), [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version, seeders, leechers, size, pubdate, torrent_hash]] @@ -390,33 +377,32 @@ def searchCache(self, episode, forced_search=False, downCurQuality=False): return neededEps[episode] if episode in neededEps else [] def listPropers(self, date=None): - cache_db_con = self._get_db() - sql = "SELECT * FROM [{0}] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'".format(self.provider_id) + cache_db_con = self._getDB() + sql = b"SELECT * FROM [{provider_id}] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'".format(provider_id=self.providerID) if date is not None: - sql += ' AND time >= {0}'.format(int(time.mktime(date.timetuple()))) + sql += b' AND time >= {0}'.format(int(time.mktime(date.timetuple()))) propers_results = cache_db_con.select(sql) - return [x for x in propers_results if x['indexerid']] + return [x for x in propers_results if x[b'indexerid']] def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False): # pylint:disable=too-many-locals, too-many-branches neededEps = {} cl = [] - cache_db_con = self._get_db() + cache_db_con = self._getDB() if not episode: - sql_results = cache_db_con.select('SELECT * FROM [{0}]'.format(self.provider_id)) + sql_results = cache_db_con.select(b'SELECT * FROM [{provider_id}]'.format(provider_id=self.providerID)) elif not isinstance(episode, list): sql_results = cache_db_con.select( - 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? 
AND episodes LIKE ?'.format(self.provider_id), - [episode.show.indexerid, episode.season, '%|{0}|%'.format(episode.episode)]) + b'SELECT * FROM [{provider_id}] WHERE indexerid = ? AND season = ? AND episodes LIKE ?'.format(provider_id=self.providerID), + [episode.show.indexerid, episode.season, b'%|{0}|%'.format(episode.episode)]) else: for epObj in episode: cl.append([ - 'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ? AND quality IN ({1})'.format - (self.provider_id, ','.join( - [str(x) for x in epObj.wantedQuality])), - [epObj.show.indexerid, epObj.season, '%|{0}|%'.format(epObj.episode)]]) + b'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ? AND quality IN ({1})'. + format(self.providerID, ','.join([str(x) for x in epObj.wantedQuality])), + [epObj.show.indexerid, epObj.season, b"%|{0}|%".format(epObj.episode)]]) sql_results = cache_db_con.mass_action(cl, fetchall=True) sql_results = list(itertools.chain(*sql_results)) @@ -424,55 +410,55 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) # for each cache entry for curResult in sql_results: # ignored/required words, and non-tv junk - if not show_name_helpers.filterBadReleases(curResult['name']): + if not show_name_helpers.filterBadReleases(curResult[b"name"]): continue # get the show object, or if it's not one of our shows then ignore it - showObj = Show.find(sickbeard.showList, int(curResult['indexerid'])) + showObj = Show.find(sickbeard.showList, int(curResult[b"indexerid"])) if not showObj: continue # skip if provider is anime only and show is not anime if self.provider.anime_only and not showObj.is_anime: - logger.log(u'{0} is not an anime, skiping'.format(showObj.name), logger.DEBUG) + logger.log("{0} is not an anime, skiping".format(showObj.name), logger.DEBUG) continue # get season and ep data (ignoring multi-eps for now) - curSeason = int(curResult['season']) + curSeason = int(curResult[b"season"]) if curSeason == -1: continue - curEp = curResult['episodes'].split('|')[1] + curEp = curResult[b"episodes"].split("|")[1] if not curEp: continue curEp = int(curEp) - curQuality = int(curResult['quality']) - curReleaseGroup = curResult['release_group'] - curVersion = curResult['version'] + curQuality = int(curResult[b"quality"]) + curReleaseGroup = curResult[b"release_group"] + curVersion = curResult[b"version"] # if the show says we want that episode then add it to the list if not showObj.wantEpisode(curSeason, curEp, curQuality, forced_search, downCurQuality): - logger.log(u'Ignoring {0}'.format(curResult['name']), logger.DEBUG) + logger.log("Ignoring {0}".format(curResult[b"name"]), logger.DEBUG) continue epObj = showObj.getEpisode(curSeason, curEp) # build a result object - title = curResult['name'] - url = curResult['url'] + title = curResult[b"name"] + url = curResult[b"url"] - logger.log(u'Found result {0} at {1}'.format(title, url)) + logger.log("Found result {0} at {1}".format(title, url)) result = self.provider.get_result([epObj]) result.show = showObj result.url = url - result.seeders = curResult['seeders'] - result.leechers = curResult['leechers'] - result.size = curResult['size'] - result.pubdate = curResult['pubdate'] - result.hash = curResult['hash'] + result.seeders = curResult[b"seeders"] + result.leechers = curResult[b"leechers"] + result.size = curResult[b"size"] + result.pubdate = curResult[b"pubdate"] + result.hash = curResult[b"hash"] result.name = title result.quality = curQuality result.release_group = curReleaseGroup diff --git 
a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index e1fad5b848..de3f723338 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -274,7 +274,7 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, logger.log('Adding item from search to cache: %s' % title, logger.DEBUG) # pylint: disable=protected-access # Access to a protected member of a client class - ci = self.cache._addCacheEntry(title, url, seeders, leechers, size, pubdate, torrent_hash, parse_result=parse_result) + ci = self.cache._addCacheEntry(title, url, seeders, leechers, size, pubdate, torrent_hash) if ci is not None: cl.append(ci) From 3d39affec8d2e6dfe63d9a0615e39fb8403c85aa Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 19 Jun 2016 22:44:55 +0200 Subject: [PATCH 084/134] Replaced double quotes --- sickbeard/tvcache.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 2f2593b51d..55b7a92f37 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -402,7 +402,7 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) cl.append([ b'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ? AND quality IN ({1})'. format(self.providerID, ','.join([str(x) for x in epObj.wantedQuality])), - [epObj.show.indexerid, epObj.season, b"%|{0}|%".format(epObj.episode)]]) + [epObj.show.indexerid, epObj.season, b'%|{0}|%'.format(epObj.episode)]]) sql_results = cache_db_con.mass_action(cl, fetchall=True) sql_results = list(itertools.chain(*sql_results)) @@ -410,55 +410,55 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) # for each cache entry for curResult in sql_results: # ignored/required words, and non-tv junk - if not show_name_helpers.filterBadReleases(curResult[b"name"]): + if not show_name_helpers.filterBadReleases(curResult[b'name']): continue # get the show object, or if it's not one of our shows then ignore it - showObj = Show.find(sickbeard.showList, int(curResult[b"indexerid"])) + showObj = Show.find(sickbeard.showList, int(curResult[b'indexerid'])) if not showObj: continue # skip if provider is anime only and show is not anime if self.provider.anime_only and not showObj.is_anime: - logger.log("{0} is not an anime, skiping".format(showObj.name), logger.DEBUG) + logger.log('{0} is not an anime, skiping'.format(showObj.name), logger.DEBUG) continue # get season and ep data (ignoring multi-eps for now) - curSeason = int(curResult[b"season"]) + curSeason = int(curResult[b'season']) if curSeason == -1: continue - curEp = curResult[b"episodes"].split("|")[1] + curEp = curResult[b'episodes'].split('|')[1] if not curEp: continue curEp = int(curEp) - curQuality = int(curResult[b"quality"]) - curReleaseGroup = curResult[b"release_group"] - curVersion = curResult[b"version"] + curQuality = int(curResult[b'quality']) + curReleaseGroup = curResult[b'release_group'] + curVersion = curResult[b'version'] # if the show says we want that episode then add it to the list if not showObj.wantEpisode(curSeason, curEp, curQuality, forced_search, downCurQuality): - logger.log("Ignoring {0}".format(curResult[b"name"]), logger.DEBUG) + logger.log('Ignoring {0}'.format(curResult[b'name']), logger.DEBUG) continue epObj = showObj.getEpisode(curSeason, curEp) # build a result object - title = curResult[b"name"] - url = curResult[b"url"] + title = curResult[b'name'] + 
url = curResult[b'url'] - logger.log("Found result {0} at {1}".format(title, url)) + logger.log('Found result {0} at {1}'.format(title, url)) result = self.provider.get_result([epObj]) result.show = showObj result.url = url - result.seeders = curResult[b"seeders"] - result.leechers = curResult[b"leechers"] - result.size = curResult[b"size"] - result.pubdate = curResult[b"pubdate"] - result.hash = curResult[b"hash"] + result.seeders = curResult[b'seeders'] + result.leechers = curResult[b'leechers'] + result.size = curResult[b'size'] + result.pubdate = curResult[b'pubdate'] + result.hash = curResult[b'hash'] result.name = title result.quality = curQuality result.release_group = curReleaseGroup From e5d1adb1fecdaccd66ea093c6c5262c7e91d9127 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 20 Jun 2016 22:06:55 +0200 Subject: [PATCH 085/134] PEP8 naming fixes, to be able to rebase on dev. --- sickbeard/tvcache.py | 80 ++++++++++++++++++++++---------------------- 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 55b7a92f37..d4e25ac61c 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -34,61 +34,61 @@ class CacheDBConnection(db.DBConnection): - def __init__(self, providerName): + def __init__(self, provider_name): db.DBConnection.__init__(self, 'cache.db') # Create the table if it's not already there try: - if not self.hasTable(providerName): - logger.log('Creating cache table for provider {0}'.format(providerName), logger.DEBUG) + if not self.hasTable(provider_name): + logger.log('Creating cache table for provider {0}'.format(provider_name), logger.DEBUG) self.action( b'CREATE TABLE [{provider_name}] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, ' - b'url TEXT, time NUMERIC, quality NUMERIC, release_group TEXT)'.format(provider_name=providerName)) + b'url TEXT, time NUMERIC, quality NUMERIC, release_group TEXT)'.format(provider_name=provider_name)) else: sql_results = self.select(b'SELECT url, COUNT(url) AS count FROM [{provider_name}] ' - b'GROUP BY url HAVING count > 1'.format(provider_name=providerName)) + b'GROUP BY url HAVING count > 1'.format(provider_name=provider_name)) for cur_dupe in sql_results: - self.action(b'DELETE FROM [{provider_name}] WHERE url = ?'.format(provider_name=providerName), [cur_dupe[b'url']]) + self.action(b'DELETE FROM [{provider_name}] WHERE url = ?'.format(provider_name=provider_name), [cur_dupe[b'url']]) # remove wrong old index self.action(b'DROP INDEX IF EXISTS idx_url') # add unique index to prevent further dupes from happening if one does not exist - logger.log(b'Creating UNIQUE URL index for {0}'.format(providerName), logger.DEBUG) + logger.log(b'Creating UNIQUE URL index for {0}'.format(provider_name), logger.DEBUG) self.action(b'CREATE UNIQUE INDEX IF NOT EXISTS idx_url_{0} ON [{1}] (url)'. 
- format(providerName, providerName)) + format(provider_name, provider_name)) # add release_group column to table if missing - if not self.hasColumn(providerName, 'release_group'): - self.addColumn(providerName, 'release_group', 'TEXT', '') + if not self.hasColumn(provider_name, 'release_group'): + self.addColumn(provider_name, 'release_group', 'TEXT', '') # add version column to table if missing - if not self.hasColumn(providerName, 'version'): - self.addColumn(providerName, 'version', 'NUMERIC', '-1') + if not self.hasColumn(provider_name, 'version'): + self.addColumn(provider_name, 'version', 'NUMERIC', '-1') # add seeders column to table if missing - if not self.hasColumn(providerName, 'seeders'): - self.addColumn(providerName, 'seeders', 'NUMERIC', '-1') + if not self.hasColumn(provider_name, 'seeders'): + self.addColumn(provider_name, 'seeders', 'NUMERIC', '-1') # add leechers column to table if missing - if not self.hasColumn(providerName, 'leechers'): - self.addColumn(providerName, 'leechers', 'NUMERIC', '-1') + if not self.hasColumn(provider_name, 'leechers'): + self.addColumn(provider_name, 'leechers', 'NUMERIC', '-1') # add size column to table if missing - if not self.hasColumn(providerName, 'size'): - self.addColumn(providerName, 'size', 'NUMERIC', '-1') + if not self.hasColumn(provider_name, 'size'): + self.addColumn(provider_name, 'size', 'NUMERIC', '-1') # add pubdate column to table if missing - if not self.hasColumn(providerName, 'pubdate'): - self.addColumn(providerName, 'pubdate', 'NUMERIC', '') + if not self.hasColumn(provider_name, 'pubdate'): + self.addColumn(provider_name, 'pubdate', 'NUMERIC', '') # add hash column to table if missing - if not self.hasColumn(providerName, 'hash'): - self.addColumn(providerName, 'hash', 'NUMERIC', '') + if not self.hasColumn(provider_name, 'hash'): + self.addColumn(provider_name, 'hash', 'NUMERIC', '') except Exception as e: - if str(e) != 'table [{provider_name}] already exists'.format(provider_name=providerName): + if str(e) != 'table [{provider_name}] already exists'.format(provider_name=provider_name): raise # Create the table if it's not already there @@ -106,17 +106,17 @@ def __init__(self, providerName): class TVCache(object): def __init__(self, provider, **kwargs): self.provider = provider - self.providerID = self.provider.get_id() - self.providerDB = None + self.provider_id = self.provider.get_id() + self.provider_db = None self.minTime = kwargs.pop('min_time', 10) self.search_params = kwargs.pop('search_params', dict(RSS=[''])) def _getDB(self): # init provider database if not done already - if not self.providerDB: - self.providerDB = CacheDBConnection(self.providerID) + if not self.provider_db: + self.provider_db = CacheDBConnection(self.provider_id) - return self.providerDB + return self.provider_db def _clearCache(self): """ @@ -137,11 +137,11 @@ def trim_cache(self, days=None): now = int(time.time()) # current timestamp retention_period = now - (days * 86400) logger.log('Removing cache entries older than {x} days from {provider}'.format - (x=days, provider=self.providerID)) + (x=days, provider=self.provider_id)) cache_db_con = self._getDB() cache_db_con.action( b'DELETE FROM [{provider}] ' - b'WHERE time < ? '.format(provider=self.providerID), + b'WHERE time < ? 
'.format(provider=self.provider_id), [retention_period] ) @@ -266,7 +266,7 @@ def _parseItem(self, item): def _getLastUpdate(self): cache_db_con = self._getDB() - sql_results = cache_db_con.select(b'SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID]) + sql_results = cache_db_con.select(b'SELECT time FROM lastUpdate WHERE provider = ?', [self.provider_id]) if sql_results: lastTime = int(sql_results[0][b'time']) @@ -279,7 +279,7 @@ def _getLastUpdate(self): def _getLastSearch(self): cache_db_con = self._getDB() - sql_results = cache_db_con.select(b'SELECT time FROM lastSearch WHERE provider = ?', [self.providerID]) + sql_results = cache_db_con.select(b'SELECT time FROM lastSearch WHERE provider = ?', [self.provider_id]) if sql_results: lastTime = int(sql_results[0][b'time']) @@ -298,7 +298,7 @@ def setLastUpdate(self, toDate=None): cache_db_con.upsert( b'lastUpdate', {b'time': int(time.mktime(toDate.timetuple()))}, - {b'provider': self.providerID} + {b'provider': self.provider_id} ) def setLastSearch(self, toDate=None): @@ -309,7 +309,7 @@ def setLastSearch(self, toDate=None): cache_db_con.upsert( b'lastSearch', {b'time': int(time.mktime(toDate.timetuple()))}, - {b'provider': self.providerID} + {b'provider': self.provider_id} ) lastUpdate = property(_getLastUpdate) @@ -364,11 +364,11 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, torrent_ha # get version version = parse_result.version - logger.log('Added RSS item: [{0}] to cache: [{1}]'.format(name, self.providerID), logger.DEBUG) + logger.log('Added RSS item: [{0}] to cache: [{1}]'.format(name, self.provider_id), logger.DEBUG) return [ b'INSERT OR REPLACE INTO [{provider_id}] (name, season, episodes, indexerid, url, time, quality, release_group, version, seeders, ' - b'leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(provider_id=self.providerID), + b'leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(provider_id=self.provider_id), [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version, seeders, leechers, size, pubdate, torrent_hash]] @@ -378,7 +378,7 @@ def searchCache(self, episode, forced_search=False, downCurQuality=False): def listPropers(self, date=None): cache_db_con = self._getDB() - sql = b"SELECT * FROM [{provider_id}] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'".format(provider_id=self.providerID) + sql = b"SELECT * FROM [{provider_id}] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'".format(provider_id=self.provider_id) if date is not None: sql += b' AND time >= {0}'.format(int(time.mktime(date.timetuple()))) @@ -392,16 +392,16 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) cache_db_con = self._getDB() if not episode: - sql_results = cache_db_con.select(b'SELECT * FROM [{provider_id}]'.format(provider_id=self.providerID)) + sql_results = cache_db_con.select(b'SELECT * FROM [{provider_id}]'.format(provider_id=self.provider_id)) elif not isinstance(episode, list): sql_results = cache_db_con.select( - b'SELECT * FROM [{provider_id}] WHERE indexerid = ? AND season = ? AND episodes LIKE ?'.format(provider_id=self.providerID), + b'SELECT * FROM [{provider_id}] WHERE indexerid = ? AND season = ? AND episodes LIKE ?'.format(provider_id=self.provider_id), [episode.show.indexerid, episode.season, b'%|{0}|%'.format(episode.episode)]) else: for epObj in episode: cl.append([ b'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? 
AND episodes LIKE ? AND quality IN ({1})'. - format(self.providerID, ','.join([str(x) for x in epObj.wantedQuality])), + format(self.provider_id, ','.join([str(x) for x in epObj.wantedQuality])), [epObj.show.indexerid, epObj.season, b'%|{0}|%'.format(epObj.episode)]]) sql_results = cache_db_con.mass_action(cl, fetchall=True) From caee4cf51a071fad69a6928aee8baaabd6ba94ff Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 20 Jun 2016 22:16:59 +0200 Subject: [PATCH 086/134] Don't need b'' here. --- sickbeard/tvcache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index d4e25ac61c..d3c6f050c7 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -348,7 +348,7 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, torrent_ha if season is not None and episodes is not None: # store episodes as a seperated string - episodeText = b'|{0}|'.format(b'|'.join({str(episode) for episode in episodes if episode})) + episodeText = '|{0}|'.format('|'.join({str(episode) for episode in episodes if episode})) # get the current timestamp curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) From 0684e91394094a2366d61d5bfba0acfbf364b388 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 20 Jun 2016 22:23:44 +0200 Subject: [PATCH 087/134] changed provider_name to provider_id --- sickbeard/tvcache.py | 54 ++++++++++++++++++++++---------------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index d3c6f050c7..8688a229fa 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -34,61 +34,61 @@ class CacheDBConnection(db.DBConnection): - def __init__(self, provider_name): + def __init__(self, provider_id): db.DBConnection.__init__(self, 'cache.db') # Create the table if it's not already there try: - if not self.hasTable(provider_name): - logger.log('Creating cache table for provider {0}'.format(provider_name), logger.DEBUG) + if not self.hasTable(provider_id): + logger.log('Creating cache table for provider {0}'.format(provider_id), logger.DEBUG) self.action( - b'CREATE TABLE [{provider_name}] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, ' - b'url TEXT, time NUMERIC, quality NUMERIC, release_group TEXT)'.format(provider_name=provider_name)) + b'CREATE TABLE [{provider_id}] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, ' + b'url TEXT, time NUMERIC, quality NUMERIC, release_group TEXT)'.format(provider_id=provider_id)) else: - sql_results = self.select(b'SELECT url, COUNT(url) AS count FROM [{provider_name}] ' - b'GROUP BY url HAVING count > 1'.format(provider_name=provider_name)) + sql_results = self.select(b'SELECT url, COUNT(url) AS count FROM [{provider_id}] ' + b'GROUP BY url HAVING count > 1'.format(provider_id=provider_id)) for cur_dupe in sql_results: - self.action(b'DELETE FROM [{provider_name}] WHERE url = ?'.format(provider_name=provider_name), [cur_dupe[b'url']]) + self.action(b'DELETE FROM [{provider_id}] WHERE url = ?'.format(provider_id=provider_id), [cur_dupe[b'url']]) # remove wrong old index self.action(b'DROP INDEX IF EXISTS idx_url') # add unique index to prevent further dupes from happening if one does not exist - logger.log(b'Creating UNIQUE URL index for {0}'.format(provider_name), logger.DEBUG) + logger.log(b'Creating UNIQUE URL index for {0}'.format(provider_id), logger.DEBUG) self.action(b'CREATE UNIQUE INDEX IF NOT EXISTS idx_url_{0} ON [{1}] (url)'. 
- format(provider_name, provider_name)) + format(provider_id, provider_id)) # add release_group column to table if missing - if not self.hasColumn(provider_name, 'release_group'): - self.addColumn(provider_name, 'release_group', 'TEXT', '') + if not self.hasColumn(provider_id, 'release_group'): + self.addColumn(provider_id, 'release_group', 'TEXT', '') # add version column to table if missing - if not self.hasColumn(provider_name, 'version'): - self.addColumn(provider_name, 'version', 'NUMERIC', '-1') + if not self.hasColumn(provider_id, 'version'): + self.addColumn(provider_id, 'version', 'NUMERIC', '-1') # add seeders column to table if missing - if not self.hasColumn(provider_name, 'seeders'): - self.addColumn(provider_name, 'seeders', 'NUMERIC', '-1') + if not self.hasColumn(provider_id, 'seeders'): + self.addColumn(provider_id, 'seeders', 'NUMERIC', '-1') # add leechers column to table if missing - if not self.hasColumn(provider_name, 'leechers'): - self.addColumn(provider_name, 'leechers', 'NUMERIC', '-1') + if not self.hasColumn(provider_id, 'leechers'): + self.addColumn(provider_id, 'leechers', 'NUMERIC', '-1') # add size column to table if missing - if not self.hasColumn(provider_name, 'size'): - self.addColumn(provider_name, 'size', 'NUMERIC', '-1') + if not self.hasColumn(provider_id, 'size'): + self.addColumn(provider_id, 'size', 'NUMERIC', '-1') # add pubdate column to table if missing - if not self.hasColumn(provider_name, 'pubdate'): - self.addColumn(provider_name, 'pubdate', 'NUMERIC', '') + if not self.hasColumn(provider_id, 'pubdate'): + self.addColumn(provider_id, 'pubdate', 'NUMERIC', '') # add hash column to table if missing - if not self.hasColumn(provider_name, 'hash'): - self.addColumn(provider_name, 'hash', 'NUMERIC', '') + if not self.hasColumn(provider_id, 'hash'): + self.addColumn(provider_id, 'hash', 'NUMERIC', '') except Exception as e: - if str(e) != 'table [{provider_name}] already exists'.format(provider_name=provider_name): + if str(e) != 'table [{provider_id}] already exists'.format(provider_id=provider_id): raise # Create the table if it's not already there @@ -96,8 +96,8 @@ def __init__(self, provider_name): if not self.hasTable('lastUpdate'): self.action(b'CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)') except Exception as e: - logger.log('Error while searching {provider_name}, skipping: {e!r}'. - format(provider_name=self.provider.name, e=e), logger.DEBUG) + logger.log('Error while searching {provider_id}, skipping: {e!r}'. 
+ format(provider_id=self.provider.name, e=e), logger.DEBUG) logger.log(traceback.format_exc(), logger.DEBUG) if str(e) != 'table lastUpdate already exists': raise From d8e894ee3abde31afae456f0c9a45a15e9d2fcac Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 20 Jun 2016 22:39:23 +0200 Subject: [PATCH 088/134] Fixed more rebase typos --- sickbeard/tvcache.py | 54 ++++++++++++++++++++++++++++++-------------- 1 file changed, 37 insertions(+), 17 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 8688a229fa..6d224d71f3 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -111,7 +111,7 @@ def __init__(self, provider, **kwargs): self.minTime = kwargs.pop('min_time', 10) self.search_params = kwargs.pop('search_params', dict(RSS=[''])) - def _getDB(self): + def _get_db(self): # init provider database if not done already if not self.provider_db: self.provider_db = CacheDBConnection(self.provider_id) @@ -138,7 +138,7 @@ def trim_cache(self, days=None): retention_period = now - (days * 86400) logger.log('Removing cache entries older than {x} days from {provider}'.format (x=days, provider=self.provider_id)) - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.action( b'DELETE FROM [{provider}] ' b'WHERE time < ? '.format(provider=self.provider_id), @@ -191,22 +191,42 @@ def updateCache(self): # set updated self.setLastUpdate() - cl = [] - for item in data['entries'] or []: - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) + # get last 5 rss cache results + recent_results = self.provider.recent_results + found_recent_results = 0 # A counter that keeps track of the number of items that have been found in cache - cache_db_con = self._getDB() + cl = [] + index = 0 + for index, item in enumerate(data['entries'] or []): + if item['link'] in {cache_item['link'] for cache_item in recent_results}: + found_recent_results += 1 + + if found_recent_results >= self.provider.stop_at: + logger.log('Hit the old cached items, not parsing any more for: {0}'.format + (self.provider_id), logger.DEBUG) + break + try: + ci = self._parseItem(item) + if ci is not None: + cl.append(ci) + except UnicodeDecodeError as e: + logger.log('Unicode decoding error, missed parsing item from provider {0}: {1!r}'.format + (self.provider.name, e), logger.WARNING) + + cache_db_con = self._get_db() if cl: cache_db_con.mass_action(cl) + # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 + # (overwritable per provider, throug hthe max_recent_items attribute. + self.provider.recent_results = data['entries'][0:min(index, self.provider.max_recent_items)] + except AuthException as e: - logger.log('Authentication error: {0!r}'.format(ex(e)), logger.ERROR) + logger.log('Authentication error: {0!r}'.format(e), logger.ERROR) except Exception as e: logger.log('Error while searching {0}, skipping: {1!r}'.format(self.provider.name, e), logger.DEBUG) - def update_cache_manual_search(self, manual_data=None, episode_obj=None): + def update_cache_manual_search(self, manual_data=None): try: cl = [] @@ -220,7 +240,7 @@ def update_cache_manual_search(self, manual_data=None, episode_obj=None): ' skipping: {1!r}'.format(self.provider.name, e), logger.WARNING) results = [] - cache_db_con = self._getDB() + cache_db_con = self._get_db() if cl: logger.log('Mass updating cache table with manual results for provider: {0}'. 
format(self.provider.name), logger.DEBUG) @@ -265,7 +285,7 @@ def _parseItem(self, item): return False def _getLastUpdate(self): - cache_db_con = self._getDB() + cache_db_con = self._get_db() sql_results = cache_db_con.select(b'SELECT time FROM lastUpdate WHERE provider = ?', [self.provider_id]) if sql_results: @@ -278,7 +298,7 @@ def _getLastUpdate(self): return datetime.datetime.fromtimestamp(lastTime) def _getLastSearch(self): - cache_db_con = self._getDB() + cache_db_con = self._get_db() sql_results = cache_db_con.select(b'SELECT time FROM lastSearch WHERE provider = ?', [self.provider_id]) if sql_results: @@ -294,7 +314,7 @@ def setLastUpdate(self, toDate=None): if not toDate: toDate = datetime.datetime.today() - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.upsert( b'lastUpdate', {b'time': int(time.mktime(toDate.timetuple()))}, @@ -305,7 +325,7 @@ def setLastSearch(self, toDate=None): if not toDate: toDate = datetime.datetime.today() - cache_db_con = self._getDB() + cache_db_con = self._get_db() cache_db_con.upsert( b'lastSearch', {b'time': int(time.mktime(toDate.timetuple()))}, @@ -377,7 +397,7 @@ def searchCache(self, episode, forced_search=False, downCurQuality=False): return neededEps[episode] if episode in neededEps else [] def listPropers(self, date=None): - cache_db_con = self._getDB() + cache_db_con = self._get_db() sql = b"SELECT * FROM [{provider_id}] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'".format(provider_id=self.provider_id) if date is not None: @@ -390,7 +410,7 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) neededEps = {} cl = [] - cache_db_con = self._getDB() + cache_db_con = self._get_db() if not episode: sql_results = cache_db_con.select(b'SELECT * FROM [{provider_id}]'.format(provider_id=self.provider_id)) elif not isinstance(episode, list): From d718bf33009450d8acb7416463beb999969f649d Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 20 Jun 2016 22:41:05 +0200 Subject: [PATCH 089/134] Last rebase typo? --- sickbeard/tvcache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 6d224d71f3..6c2738162b 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -55,7 +55,7 @@ def __init__(self, provider_id): self.action(b'DROP INDEX IF EXISTS idx_url') # add unique index to prevent further dupes from happening if one does not exist - logger.log(b'Creating UNIQUE URL index for {0}'.format(provider_id), logger.DEBUG) + logger.log('Creating UNIQUE URL index for {0}'.format(provider_id), logger.DEBUG) self.action(b'CREATE UNIQUE INDEX IF NOT EXISTS idx_url_{0} ON [{1}] (url)'. 
format(provider_id, provider_id)) From e8c78158091247eb7c57d8eba2ee48c382558da0 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 20 Jun 2016 22:52:32 +0200 Subject: [PATCH 090/134] Did the last of the pep8 name changes --- sickbeard/tvcache.py | 82 ++++++++++++++++++++++---------------------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 6c2738162b..a0f7267809 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -371,7 +371,7 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, torrent_ha episodeText = '|{0}|'.format('|'.join({str(episode) for episode in episodes if episode})) # get the current timestamp - curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) + cur_timestamp = int(time.mktime(datetime.datetime.today().timetuple())) # get quality of release quality = parse_result.quality @@ -389,12 +389,12 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, torrent_ha return [ b'INSERT OR REPLACE INTO [{provider_id}] (name, season, episodes, indexerid, url, time, quality, release_group, version, seeders, ' b'leechers, size, pubdate, hash) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)'.format(provider_id=self.provider_id), - [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, + [name, season, episodeText, parse_result.show.indexerid, url, cur_timestamp, quality, release_group, version, seeders, leechers, size, pubdate, torrent_hash]] def searchCache(self, episode, forced_search=False, downCurQuality=False): - neededEps = self.findNeededEpisodes(episode, forced_search, downCurQuality) - return neededEps[episode] if episode in neededEps else [] + needed_eps = self.findNeededEpisodes(episode, forced_search, downCurQuality) + return needed_eps[episode] if episode in needed_eps else [] def listPropers(self, date=None): cache_db_con = self._get_db() @@ -407,7 +407,7 @@ def listPropers(self, date=None): return [x for x in propers_results if x[b'indexerid']] def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False): # pylint:disable=too-many-locals, too-many-branches - neededEps = {} + needed_eps = {} cl = [] cache_db_con = self._get_db() @@ -418,80 +418,80 @@ def findNeededEpisodes(self, episode, forced_search=False, downCurQuality=False) b'SELECT * FROM [{provider_id}] WHERE indexerid = ? AND season = ? AND episodes LIKE ?'.format(provider_id=self.provider_id), [episode.show.indexerid, episode.season, b'%|{0}|%'.format(episode.episode)]) else: - for epObj in episode: + for ep_obj in episode: cl.append([ b'SELECT * FROM [{0}] WHERE indexerid = ? AND season = ? AND episodes LIKE ? AND quality IN ({1})'. 
- format(self.provider_id, ','.join([str(x) for x in epObj.wantedQuality])), - [epObj.show.indexerid, epObj.season, b'%|{0}|%'.format(epObj.episode)]]) + format(self.provider_id, ','.join([str(x) for x in ep_obj.wantedQuality])), + [ep_obj.show.indexerid, ep_obj.season, b'%|{0}|%'.format(ep_obj.episode)]]) sql_results = cache_db_con.mass_action(cl, fetchall=True) sql_results = list(itertools.chain(*sql_results)) # for each cache entry - for curResult in sql_results: + for cur_result in sql_results: # ignored/required words, and non-tv junk - if not show_name_helpers.filterBadReleases(curResult[b'name']): + if not show_name_helpers.filterBadReleases(cur_result[b'name']): continue # get the show object, or if it's not one of our shows then ignore it - showObj = Show.find(sickbeard.showList, int(curResult[b'indexerid'])) - if not showObj: + show_obj = Show.find(sickbeard.showList, int(cur_result[b'indexerid'])) + if not show_obj: continue # skip if provider is anime only and show is not anime - if self.provider.anime_only and not showObj.is_anime: - logger.log('{0} is not an anime, skiping'.format(showObj.name), logger.DEBUG) + if self.provider.anime_only and not show_obj.is_anime: + logger.log('{0} is not an anime, skiping'.format(show_obj.name), logger.DEBUG) continue # get season and ep data (ignoring multi-eps for now) - curSeason = int(curResult[b'season']) - if curSeason == -1: + cur_season = int(cur_result[b'season']) + if cur_season == -1: continue - curEp = curResult[b'episodes'].split('|')[1] - if not curEp: + cur_ep = cur_result[b'episodes'].split('|')[1] + if not cur_ep: continue - curEp = int(curEp) + cur_ep = int(cur_ep) - curQuality = int(curResult[b'quality']) - curReleaseGroup = curResult[b'release_group'] - curVersion = curResult[b'version'] + cur_quality = int(cur_result[b'quality']) + cur_release_group = cur_result[b'release_group'] + cur_version = cur_result[b'version'] # if the show says we want that episode then add it to the list - if not showObj.wantEpisode(curSeason, curEp, curQuality, forced_search, downCurQuality): - logger.log('Ignoring {0}'.format(curResult[b'name']), logger.DEBUG) + if not show_obj.wantEpisode(cur_season, cur_ep, cur_quality, forced_search, downCurQuality): + logger.log('Ignoring {0}'.format(cur_result[b'name']), logger.DEBUG) continue - epObj = showObj.getEpisode(curSeason, curEp) + ep_obj = show_obj.getEpisode(cur_season, cur_ep) # build a result object - title = curResult[b'name'] - url = curResult[b'url'] + title = cur_result[b'name'] + url = cur_result[b'url'] logger.log('Found result {0} at {1}'.format(title, url)) - result = self.provider.get_result([epObj]) - result.show = showObj + result = self.provider.get_result([ep_obj]) + result.show = show_obj result.url = url - result.seeders = curResult[b'seeders'] - result.leechers = curResult[b'leechers'] - result.size = curResult[b'size'] - result.pubdate = curResult[b'pubdate'] - result.hash = curResult[b'hash'] + result.seeders = cur_result[b'seeders'] + result.leechers = cur_result[b'leechers'] + result.size = cur_result[b'size'] + result.pubdate = cur_result[b'pubdate'] + result.hash = cur_result[b'hash'] result.name = title - result.quality = curQuality - result.release_group = curReleaseGroup - result.version = curVersion + result.quality = cur_quality + result.release_group = cur_release_group + result.version = cur_version result.content = None # add it to the list - if epObj not in neededEps: - neededEps[epObj] = [result] + if ep_obj not in needed_eps: + needed_eps[ep_obj] = [result] 
else: - neededEps[epObj].append(result) + needed_eps[ep_obj].append(result) # datetime stamp this search so cache gets cleared self.setLastSearch() - return neededEps + return needed_eps From e1fa30f5da96cf5c1f3d6aab5dea2573975b5882 Mon Sep 17 00:00:00 2001 From: Dario Date: Tue, 21 Jun 2016 22:40:39 +0200 Subject: [PATCH 091/134] Quick fix for BTN --- sickbeard/providers/btn.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index 73d3caa412..0e399cc872 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -87,7 +87,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many search_params.update(search_strings) logger.log('Search string: {0}'.format(search_strings), logger.DEBUG) - parsed_json = self._api_call(self.apikey, search_params) + parsed_json = self._api_call(self.api_key, search_params) if not parsed_json: logger.log('No data returned from provider', logger.DEBUG) return results @@ -110,7 +110,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many # +1 because range(1,4) = 1, 2, 3 for page in range(1, pages_needed + 1): - parsed_json = self._api_call(self.apikey, search_params, results_per_page, page * results_per_page) + parsed_json = self._api_call(self.api_key, search_params, results_per_page, page * results_per_page) # Note that these are individual requests and might time out individually. # This would result in 'gaps' in the results. There is no way to fix this though. if 'torrents' in parsed_json: From f1cea625f994367f1988f7011903fb61c84d385d Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 22 Jun 2016 12:24:29 +0200 Subject: [PATCH 092/134] Set anime_only for AnimeBytes --- sickbeard/providers/animebytes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sickbeard/providers/animebytes.py b/sickbeard/providers/animebytes.py index 10a59400f5..916ad68869 100644 --- a/sickbeard/providers/animebytes.py +++ b/sickbeard/providers/animebytes.py @@ -65,6 +65,7 @@ def __init__(self): self.proper_strings = [] # Miscellaneous Options + self.anime_only = True # Torrent Stats self.minseed = None From f871bd79ef637531db1061d59e5ce71d4688da20 Mon Sep 17 00:00:00 2001 From: Labrys Date: Wed, 22 Jun 2016 10:10:19 -0400 Subject: [PATCH 093/134] Fix support for qbittorrent >= 3.3.5 Implements fix from SickRage/SickRage#1996 for issue SickRage/SickRage#1990 All credits go to @ngodber --- sickbeard/clients/qbittorrent_client.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sickbeard/clients/qbittorrent_client.py b/sickbeard/clients/qbittorrent_client.py index 14b848453f..131dd06837 100644 --- a/sickbeard/clients/qbittorrent_client.py +++ b/sickbeard/clients/qbittorrent_client.py @@ -93,7 +93,10 @@ def _set_torrent_label(self, result): label = sickbeard.TORRENT_LABEL_ANIME if result.show.is_anime else sickbeard.TORRENT_LABEL if self.api > 6 and label: - self.url = '{host}command/setLabel'.format(host=self.host) + self.url = '{host}command/{cmd}'.format( + host=self.host, + cmd='setCategory' if self.api >= 10 else 'setLabel' + ) data = { 'hashes': result.hash.lower(), 'label': label.replace(' ', '_'), From 23ad50282433aacad584ee56589996b6ef8dbe2b Mon Sep 17 00:00:00 2001 From: Labrys Date: Wed, 22 Jun 2016 11:59:17 -0400 Subject: [PATCH 094/134] Fix support for qbittorrent >= 3.3.5 --- sickbeard/clients/qbittorrent_client.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git 
a/sickbeard/clients/qbittorrent_client.py b/sickbeard/clients/qbittorrent_client.py index 131dd06837..15242171dd 100644 --- a/sickbeard/clients/qbittorrent_client.py +++ b/sickbeard/clients/qbittorrent_client.py @@ -93,13 +93,14 @@ def _set_torrent_label(self, result): label = sickbeard.TORRENT_LABEL_ANIME if result.show.is_anime else sickbeard.TORRENT_LABEL if self.api > 6 and label: - self.url = '{host}command/{cmd}'.format( + label_key = 'Category' if self.api >= 10 else 'Label' + self.url = '{host}command/set{key}'.format( host=self.host, - cmd='setCategory' if self.api >= 10 else 'setLabel' + key=label_key, ) data = { 'hashes': result.hash.lower(), - 'label': label.replace(' ', '_'), + label_key.lower(): label.replace(' ', '_'), } return self._request(method='post', data=data, cookies=self.session.cookies) return None From 7ee057ca6c0a46b35fbff38e5fff93dd69f54c4d Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 22 Jun 2016 20:04:58 +0200 Subject: [PATCH 095/134] Better non-logged string --- sickbeard/providers/animebytes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/providers/animebytes.py b/sickbeard/providers/animebytes.py index 916ad68869..f3c508ef6d 100644 --- a/sickbeard/providers/animebytes.py +++ b/sickbeard/providers/animebytes.py @@ -319,7 +319,7 @@ def login(self): logger.log('Unable to connect to provider', logger.WARNING) return False - if re.search('Login incorrect. Only perfect spellers may enter this system!', response): + if re.search('You will be banned for 6 hours after your login attempts run out.', response): logger.log('Invalid username or password. Check your settings', logger.WARNING) self.session.cookies.clear() return False From 4784949fd0cf63d6d5ad1c65705250f44a030d7e Mon Sep 17 00:00:00 2001 From: p0ps Date: Fri, 24 Jun 2016 20:44:26 +0200 Subject: [PATCH 096/134] Updated to latest simpleanidb lib dev version (#711) * Updated to latest simpleanidb dev version. We maybe need to start keep track of versions here? 
* Removed unneeded imports and unnecessary newline --- lib/simpleanidb/__init__.py | 85 +++++++++++++++++++++++++------------ lib/simpleanidb/models.py | 5 --- 2 files changed, 59 insertions(+), 31 deletions(-) diff --git a/lib/simpleanidb/__init__.py b/lib/simpleanidb/__init__.py index d807afe281..95829751dd 100644 --- a/lib/simpleanidb/__init__.py +++ b/lib/simpleanidb/__init__.py @@ -14,10 +14,14 @@ __version__ = "0.1.0" __author__ = "Dennis Lutter" -ANIME_LIST_URL = "http://anidb.net/api/anime-titles.xml.gz" +# Get this file directly from anidb batch import api +ANIME_TITLES_URL = "http://anidb.net/api/anime-titles.xml.gz" -ANIDB_URL = \ - "http://api.anidb.net:9001/httpapi" +# Get this file from ScudLee's managed anidb lists +ANIME_LIST_URL = "https://raw.githubusercontent.com/ScudLee/anime-lists/master/anime-list.xml" + +# Url for the anidb http api +ANIDB_URL = "http://api.anidb.net:9001/httpapi" # Request list Types REQUEST_CATEGORY_LIST = "categorylist" @@ -41,10 +45,13 @@ def __init__(self, session=None, cache_dir=None, auto_download=True, lang=None): self.session = session or requests.Session() self.session.headers.setdefault('user-agent', 'simpleanidb/{0}.{1}.{2}'.format(*__version__)) - self.anime_list_path = os.path.join( + self.anime_titles_path = os.path.join( self._cache_dir, "anime-titles.xml.gz") + self.anime_list_path = os.path.join( + self._cache_dir, "anime-list.xml.gz") self.auto_download = auto_download - self._xml = None + self._xml_titles = self._xml = None + self._xml_list = None self.lang = lang if not lang: self.lang = "en" @@ -68,24 +75,50 @@ def _get_temp_dir(self): return path - def search(self, term, autoload=False): - if not self._xml: - try: - self._xml = self._read_file(self.anime_list_path) - except IOError: - if self.auto_download: - self.download_anime_list() - self._xml = self._read_file(self.anime_list_path) - else: - raise - - term = term.lower() + def _load_xml(self, url): + local_file = os.path.join(self._cache_dir, url.split('/')[-1]) + xml = None + try: + xml = self._read_file(local_file) + except IOError: + if self.auto_download: + self.download_anime_list(local_file, url) + xml = self._read_file(local_file) + else: + raise + return xml + + def search(self, term=None, autoload=False, aid=None, tvdbid=None): + if not self._xml_list: + self._xml_list = self._load_xml(ANIME_LIST_URL) + + if not self._xml_titles: + self._xml_titles = self._load_xml(ANIME_TITLES_URL) + anime_ids = [] - for anime in self._xml.findall("anime"): - for title in anime.findall("title"): - if term in title.text.lower(): - anime_ids.append((int(anime.get("aid")), anime)) - break + if term: + for anime in self._xml_titles.findall("anime"): + term = term.lower() + for title in anime.findall("title"): + if term in title.text.lower(): + anime_ids.append((int(anime.get("aid")), anime)) + break + else: + if aid: + for anime in self._xml_list.findall("anime"): + if aid == int(anime.attrib.get('anidbid')): + anime_ids.append((int(anime.attrib.get('anidbid')), anime)) + break + + elif tvdbid: + for anime in self._xml_list.findall("anime"): + try: + if tvdbid == int(anime.attrib.get('tvdbid')): + anime_ids.append((int(anime.attrib.get('anidbid')), anime)) + break + except: + continue + return [Anime(self, aid, autoload, xml_node) for aid, xml_node in anime_ids] def anime(self, aid): @@ -95,13 +128,13 @@ def _read_file(self, path): f = open(path, 'rb') return etree.ElementTree(file=f) - def download_anime_list(self, force=False): - if not force and 
os.path.exists(self.anime_list_path): + def download_anime_list(self, anime_list_path, anidb_archive_url, force=False): + if not force and os.path.exists(anime_list_path): modified_date = datetime.fromtimestamp( - os.path.getmtime(self.anime_list_path)) + os.path.getmtime(anime_list_path)) if modified_date + timedelta(1) > datetime.now(): return False - return download_file(self.anime_list_path, ANIME_LIST_URL) + return download_file(anime_list_path, anidb_archive_url) def get_list(self, request_type): """Retrieve a lists of animes from anidb.info diff --git a/lib/simpleanidb/models.py b/lib/simpleanidb/models.py index 93704e4ac8..7af01b33a0 100644 --- a/lib/simpleanidb/models.py +++ b/lib/simpleanidb/models.py @@ -1,9 +1,4 @@ from __future__ import absolute_import -import requests -import xml.etree.ElementTree as ET - -from .helper import date_to_date -from .exceptions import GeneralError class Anime(object): # pylint: disable=too-many-instance-attributes From d59050d4338a6ad8983e8f555509178fe84569af Mon Sep 17 00:00:00 2001 From: Fernando Date: Fri, 24 Jun 2016 15:49:54 -0300 Subject: [PATCH 097/134] Prevent more WARNING log messages from RARBG. Make it DEBUG (#710) * Prevent more WARNING log messages from RARBG. Make it DEBUG * Remove superfluous dot --- sickbeard/providers/rarbg.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index 7261c49f98..187bb9d98e 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -139,10 +139,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if error_code == 5: # 5 = Too many requests per second logger.log('{0}. Error code: {1}'.format(error, error_code), logger.INFO) - elif error_code not in (14, 20): - # 14 = Cant find thetvdb in database. Are you sure this thetvdb exists? + elif error_code not in (8, 10, 12, 14, 20): + # 8, 10, 12, 14 = Cant find * in database. Are you sure this * exists? # 20 = No results found - logger.log('{0}. 
Error code: {1}'.format(error, error_code), logger.WARNING) + logger.log('{0} Error code: {1}'.format(error, error_code), logger.WARNING) continue torrent_results = data.get('torrent_results') From 96a2365a3acb4de5855f483b6e1069351fe502e4 Mon Sep 17 00:00:00 2001 From: Labrys Date: Tue, 7 Jun 2016 20:54:37 -0400 Subject: [PATCH 098/134] Add missing qualities to API show commands --- sickbeard/server/api/core.py | 125 ++++++++++------------------------- 1 file changed, 35 insertions(+), 90 deletions(-) diff --git a/sickbeard/server/api/core.py b/sickbeard/server/api/core.py index f4da89be53..2405711665 100644 --- a/sickbeard/server/api/core.py +++ b/sickbeard/server/api/core.py @@ -23,6 +23,7 @@ # pylint: disable=line-too-long,too-many-lines,abstract-method # pylint: disable=no-member,method-hidden,missing-docstring,invalid-name +from collections import OrderedDict from datetime import datetime, date import io import json @@ -514,33 +515,40 @@ def _ordinal_to_datetime_form(ordinal): return _ordinal_to_date(ordinal, dateTimeFormat) +quality_map = OrderedDict(( + ('sdtv', Quality.SDTV), + ('sddvd', Quality.SDDVD), + ('hdtv', Quality.HDTV), + ('rawhdtv', Quality.RAWHDTV), + ('fullhdtv', Quality.FULLHDTV), + ('hdwebdl', Quality.HDWEBDL), + ('fullhdwebdl', Quality.FULLHDWEBDL), + ('hdbluray', Quality.HDBLURAY), + ('fullhdbluray', Quality.FULLHDBLURAY), + ('uhd_4k_tv', Quality.UHD_4K_TV), + ('uhd_4k_webdl', Quality.UHD_4K_WEBDL), + ("uhd_4k_bluray", Quality.UHD_4K_BLURAY), + ('uhd_8k_tv', Quality.UHD_8K_TV), + ('uhd_8k_webdl', Quality.UHD_8K_WEBDL), + ("uhd_8k_bluray", Quality.UHD_8K_BLURAY), + ('unknown', Quality.UNKNOWN), +)) + + def _map_quality(show_obj): - quality_map = _get_quality_map() + mapped_quality = {v: k for k, v in quality_map.items()} - any_qualities = [] - best_qualities = [] + allowed_qualities = [] + preferred_qualities = [] i_quality_id, a_quality_id = Quality.splitQuality(int(show_obj)) if i_quality_id: for quality in i_quality_id: - any_qualities.append(quality_map[quality]) + allowed_qualities.append(mapped_quality[quality]) if a_quality_id: for quality in a_quality_id: - best_qualities.append(quality_map[quality]) - return any_qualities, best_qualities - - -def _get_quality_map(): - return {Quality.SDTV: 'sdtv', - Quality.SDDVD: 'sddvd', - Quality.HDTV: 'hdtv', - Quality.RAWHDTV: 'rawhdtv', - Quality.FULLHDTV: 'fullhdtv', - Quality.HDWEBDL: 'hdwebdl', - Quality.FULLHDWEBDL: 'fullhdwebdl', - Quality.HDBLURAY: 'hdbluray', - Quality.FULLHDBLURAY: 'fullhdbluray', - Quality.UNKNOWN: 'unknown'} + preferred_qualities.append(mapped_quality[quality]) + return allowed_qualities, preferred_qualities def _get_root_dirs(): @@ -1759,12 +1767,8 @@ class CMD_SickBeardSetDefaults(ApiCall): def __init__(self, args, kwargs): # required # optional - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", - ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", - "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", - ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", - "fullhdwebdl", "hdbluray", "fullhdbluray"]) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", list(quality_map)) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", list(quality_map).remove('unknown')) self.future_show_paused, args = self.check_params(args, kwargs, "future_show_paused", None, False, "bool", []) self.flatten_folders, 
args = self.check_params(args, kwargs, "flatten_folders", None, False, "bool", []) self.status, args = self.check_params(args, kwargs, "status", None, False, "string", @@ -1775,17 +1779,6 @@ def __init__(self, args, kwargs): def run(self): """ Set Medusa's user default configuration value """ - quality_map = {'sdtv': Quality.SDTV, - 'sddvd': Quality.SDDVD, - 'hdtv': Quality.HDTV, - 'rawhdtv': Quality.RAWHDTV, - 'fullhdtv': Quality.FULLHDTV, - 'hdwebdl': Quality.HDWEBDL, - 'fullhdwebdl': Quality.FULLHDWEBDL, - 'hdbluray': Quality.HDBLURAY, - 'fullhdbluray': Quality.FULLHDBLURAY, - 'unknown': Quality.UNKNOWN} - i_quality_id = [] a_quality_id = [] @@ -1977,12 +1970,8 @@ def __init__(self, args, kwargs): self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "", []) self.location, args = self.check_params(args, kwargs, "location", None, True, "string", []) # optional - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", - ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", - "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", - ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", - "fullhdwebdl", "hdbluray", "fullhdbluray"]) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", list(quality_map)) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", list(quality_map).remove('unknown')) self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", bool(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", []) self.subtitles, args = self.check_params(args, kwargs, "subtitles", int(sickbeard.USE_SUBTITLES), @@ -2014,17 +2003,6 @@ def run(self): # set indexer so we can pass it along when adding show to SR indexer = indexer_result['data']['results'][0]['indexer'] - quality_map = {'sdtv': Quality.SDTV, - 'sddvd': Quality.SDDVD, - 'hdtv': Quality.HDTV, - 'rawhdtv': Quality.RAWHDTV, - 'fullhdtv': Quality.FULLHDTV, - 'hdwebdl': Quality.HDWEBDL, - 'fullhdwebdl': Quality.FULLHDWEBDL, - 'hdbluray': Quality.HDBLURAY, - 'fullhdbluray': Quality.FULLHDBLURAY, - 'unknown': Quality.UNKNOWN} - # use default quality as a fail-safe new_quality = int(sickbeard.QUALITY_DEFAULT) i_quality_id = [] @@ -2077,12 +2055,8 @@ def __init__(self, args, kwargs): self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", []) # optional self.location, args = self.check_params(args, kwargs, "location", None, False, "string", []) - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", - ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", - "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", - ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", - "fullhdwebdl", "hdbluray", "fullhdbluray"]) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", list(quality_map)) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", list(quality_map).remove('unknown')) self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", bool(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", []) self.status, args = self.check_params(args, kwargs, "status", None, False, "string", @@ -2119,17 +2093,6 @@ def run(self): if not ek(os.path.isdir, self.location): return _responds(RESULT_FAILURE, 
msg="'" + self.location + "' is not a valid location") - quality_map = {'sdtv': Quality.SDTV, - 'sddvd': Quality.SDDVD, - 'hdtv': Quality.HDTV, - 'rawhdtv': Quality.RAWHDTV, - 'fullhdtv': Quality.FULLHDTV, - 'hdwebdl': Quality.HDWEBDL, - 'fullhdwebdl': Quality.FULLHDWEBDL, - 'hdbluray': Quality.HDBLURAY, - 'fullhdbluray': Quality.FULLHDBLURAY, - 'unknown': Quality.UNKNOWN} - # use default quality as a fail-safe new_quality = int(sickbeard.QUALITY_DEFAULT) i_quality_id = [] @@ -2618,15 +2581,8 @@ def __init__(self, args, kwargs): # required self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", []) # optional - # this for whatever reason removes hdbluray not sdtv... which is just wrong. reverting to previous code.. plus we didnt use the new code everywhere. - # self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", _get_quality_map().values()[1:]) - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", - ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", - "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", - ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", - "fullhdwebdl", - "hdbluray", "fullhdbluray"]) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", list(quality_map)) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", list(quality_map).remove('unknown')) # super, missing, help ApiCall.__init__(self, args, kwargs) @@ -2636,17 +2592,6 @@ def run(self): if not show_obj: return _responds(RESULT_FAILURE, msg="Show not found") - quality_map = {'sdtv': Quality.SDTV, - 'sddvd': Quality.SDDVD, - 'hdtv': Quality.HDTV, - 'rawhdtv': Quality.RAWHDTV, - 'fullhdtv': Quality.FULLHDTV, - 'hdwebdl': Quality.HDWEBDL, - 'fullhdwebdl': Quality.FULLHDWEBDL, - 'hdbluray': Quality.HDBLURAY, - 'fullhdbluray': Quality.FULLHDBLURAY, - 'unknown': Quality.UNKNOWN} - # use default quality as a fail-safe new_quality = int(sickbeard.QUALITY_DEFAULT) i_quality_id = [] From 18245413842b31ecde0a78241a6e03c344f102be Mon Sep 17 00:00:00 2001 From: Dario Date: Sat, 25 Jun 2016 16:43:31 +0200 Subject: [PATCH 099/134] Make BTN more decent (#712) * Make BTN more decent * Clean up a bit --- sickbeard/providers/btn.py | 195 ++++++++++++++++--------------------- 1 file changed, 84 insertions(+), 111 deletions(-) diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index 0e399cc872..0ccaab2a3b 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -19,20 +19,18 @@ from __future__ import unicode_literals import jsonrpclib -import math import socket import time -from six import iteritems - import sickbeard -from datetime import datetime -from sickbeard import classes, logger, scene_exceptions, tvcache +from six import iteritems + +from sickbeard import logger, scene_exceptions, tvcache from sickbeard.common import cpu_presets from sickbeard.helpers import sanitizeSceneName from sickrage.helper.common import episode_num -from sickrage.helper.exceptions import AuthException, ex +from sickrage.helper.exceptions import ex from sickrage.providers.torrent.TorrentProvider import TorrentProvider @@ -50,18 +48,20 @@ def __init__(self): self.url = 'http://broadcasthe.net/' self.urls = { 'base_url': 'http://api.btnapps.net', - 'website': self.url, } # Proper Strings + self.proper_strings = [] # Miscellaneous Options 
self.supports_absolute_numbering = True # Torrent Stats + self.minseed = None + self.minleech = None # Cache - self.cache = BTNCache(self, min_time=15) # Only poll BTN every 15 minutes max + self.cache = tvcache.TVCache(self, min_time=15) # Only poll BTN every 15 minutes max def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many-locals """ @@ -73,78 +73,91 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint:disable=too-many :returns: A list of search results (structure) """ results = [] - self._check_auth() + if not self._check_auth(): + return results # Search Params - search_params = { - } + search_params = {'age': '<=10800'} # Results from the past 3 hours - # age in seconds - if age: - search_params['age'] = '<=' + str(int(age)) + for mode in search_strings: + items = [] + logger.log('Search mode: {0}'.format(mode), logger.DEBUG) - if search_strings: - search_params.update(search_strings) - logger.log('Search string: {0}'.format(search_strings), logger.DEBUG) + if mode != 'RSS': + search_params = self._episode_search_params(ep_obj) + logger.log('Search string: {search}'.format + (search=search_params), logger.DEBUG) - parsed_json = self._api_call(self.api_key, search_params) - if not parsed_json: - logger.log('No data returned from provider', logger.DEBUG) - return results + if mode == 'Season': + search_params = self._season_search_params(ep_obj) - if self._check_auth_from_data(parsed_json): + response = self._api_call(self.api_key, search_params) + if not response: + logger.log('No data returned from provider', logger.DEBUG) + return results - found_torrents = parsed_json.get('torrents', {}) + if not self._check_auth_from_data(response): + return results - # We got something, we know the API sends max 1000 results at a time. - # See if there are more than 1000 results for our query, if not we - # keep requesting until we've got everything. - # max 150 requests per hour so limit at that. Scan every 15 minutes. 60 / 15 = 4. - max_pages = 150 - results_per_page = 1000 + found_torrents = response.get('torrents', {}) - if 'results' in parsed_json and int(parsed_json['results']) >= results_per_page: - pages_needed = int(math.ceil(int(parsed_json['results']) / results_per_page)) - if pages_needed > max_pages: - pages_needed = max_pages + for _, torrent_info in iteritems(found_torrents): + (title, download_url) = self._process_title_and_url(torrent_info) - # +1 because range(1,4) = 1, 2, 3 - for page in range(1, pages_needed + 1): - parsed_json = self._api_call(self.api_key, search_params, results_per_page, page * results_per_page) - # Note that these are individual requests and might time out individually. - # This would result in 'gaps' in the results. There is no way to fix this though. - if 'torrents' in parsed_json: - found_torrents.update(parsed_json['torrents']) + if not all([title, download_url]): + continue - for _, torrent_info in iteritems(found_torrents): - (title, url) = self._get_title_and_url(torrent_info) + seeders = torrent_info.get('Seeders', 1) + leechers = torrent_info.get('Leechers', 0) + + # Filter unseeded torrent + + if seeders < min(self.minseed, 1): + logger.log("Discarding torrent because it doesn't meet the " + "minimum seeders: {0}. 
Seeders: {1}".format + (title, seeders), logger.DEBUG) + continue + + size = torrent_info.get('Size') or -1 + + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': None, + 'hash': None, + } + logger.log('Found result: {0} with {1} seeders and {2} leechers'.format + (title, seeders, leechers), logger.DEBUG) + + items.append(item) - if title and url: - logger.log('Found result: {0}'.format(title), logger.DEBUG) - results.append(torrent_info) + results += items - # FIXME SORT RESULTS return results def _check_auth(self): + if not self.api_key: - logger.log('Invalid api key. Check your settings', logger.WARNING) + logger.log('Missing API key. Check your settings', logger.WARNING) + return False return True - def _check_auth_from_data(self, parsed_json): - - if parsed_json is None: - return self._check_auth() + @staticmethod + def _check_auth_from_data(parsed_json): if 'api-error' in parsed_json: - logger.log('Incorrect authentication credentials: {0}'.format(parsed_json['api-error']), logger.DEBUG) - raise AuthException('Your authentication credentials for {0} are missing,' - ' check your config.'.format(self.name)) + logger.log('Incorrect authentication credentials: {0}'.format + (parsed_json['api-error']), logger.DEBUG) + return False return True - def _get_title_and_url(self, parsed_json): + @staticmethod + def _process_title_and_url(parsed_json): # The BTN API gives a lot of information in response, # however SickRage is built mostly around Scene or @@ -173,32 +186,14 @@ def _get_title_and_url(self, parsed_json): if 'DownloadURL' in parsed_json: url = parsed_json['DownloadURL'] if url: - # unescaped / is valid in JSON, but it can be escaped + # Unescaped / is valid in JSON, but it can be escaped url = url.replace('\\/', '/') return title, url - def find_propers(self, search_date=None): - results = [] - - search_terms = ['%.proper.%', '%.repack.%'] - - for term in search_terms: - for item in self.search({'release': term}, age=4 * 24 * 60 * 60): - if item['Time']: - try: - result_date = datetime.fromtimestamp(float(item['Time'])) - except TypeError: - result_date = None - - if result_date: - if not search_date or result_date > search_date: - title, url = self._get_title_and_url(item) - results.append(classes.Proper(title, url, result_date, self.show)) - - return results + @staticmethod + def _season_search_params(ep_obj): - def _get_season_search_strings(self, ep_obj): search_params = [] current_params = {'category': 'Season'} @@ -211,7 +206,7 @@ def _get_season_search_strings(self, ep_obj): else: current_params['name'] = 'Season ' + str(ep_obj.scene_season) - # search + # Search if ep_obj.show.indexer == 1: current_params['tvdb'] = ep_obj.show.indexerid search_params.append(current_params) @@ -225,7 +220,8 @@ def _get_season_search_strings(self, ep_obj): return search_params - def _get_episode_search_strings(self, ep_obj, add_string=''): + @staticmethod + def _episode_search_params(ep_obj): if not ep_obj: return [{}] @@ -233,7 +229,7 @@ def _get_episode_search_strings(self, ep_obj, add_string=''): to_return = [] search_params = {'category': 'Episode'} - # episode + # Episode if ep_obj.show.air_by_date or ep_obj.show.sports: date_str = str(ep_obj.airdate) @@ -246,12 +242,12 @@ def _get_episode_search_strings(self, ep_obj, add_string=''): # Do a general name search for the episode, formatted like SXXEYY search_params['name'] = '{ep}'.format(ep=episode_num(ep_obj.scene_season, ep_obj.scene_episode)) - # search + # Search 
if ep_obj.show.indexer == 1: search_params['tvdb'] = ep_obj.show.indexerid to_return.append(search_params) else: - # add new query string for every exception + # Add new query string for every exception name_exceptions = list( set(scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name])) for cur_exception in name_exceptions: @@ -260,7 +256,7 @@ def _get_episode_search_strings(self, ep_obj, add_string=''): return to_return - def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): + def _api_call(self, apikey, params=None, results_per_page=300, offset=0): server = jsonrpclib.Server(self.urls['base_url']) parsed_json = {} @@ -274,7 +270,8 @@ def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): logger.log('You have exceeded the limit of 150 calls per hour,' ' per API key which is unique to your user account', logger.WARNING) else: - logger.log('JSON-RPC protocol error while accessing provider. Error: {msg!r}'.format(msg=error), logger.ERROR) + logger.log('JSON-RPC protocol error while accessing provider. Error: {msg!r}'.format + (msg=error), logger.ERROR) parsed_json = {'api-error': ex(error)} return parsed_json @@ -283,41 +280,17 @@ def _api_call(self, apikey, params=None, results_per_page=1000, offset=0): except socket.error as error: # Note that sometimes timeouts are thrown as socket errors - logger.log('Socket error while accessing provider. Error: {msg}'.format(msg=error[1]), logger.WARNING) + logger.log('Socket error while accessing provider. Error: {msg}'.format + (msg=error[1]), logger.WARNING) except Exception as error: errorstring = str(error) if errorstring.startswith('<') and errorstring.endswith('>'): errorstring = errorstring[1:-1] - logger.log('Unknown error while accessing provider. Error: {msg}'.format(msg=errorstring), logger.WARNING) + logger.log('Unknown error while accessing provider. Error: {msg}'.format + (msg=errorstring), logger.WARNING) return parsed_json - def _do_general_search(self, search_string): - # 'search' looks as broad is it can find. Can contain episode overview and title for example, - # use with caution! - return self.search({'search': search_string}) - - -class BTNCache(tvcache.TVCache): - def _getRSSData(self): - # Get the torrents uploaded since last check. 
- seconds_since_last_update = math.ceil(time.time() - time.mktime(self._getLastUpdate().timetuple())) - - # default to 15 minutes - seconds_min_time = self.minTime * 60 - if seconds_since_last_update < seconds_min_time: - seconds_since_last_update = seconds_min_time - - # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of 'RSS' data search, older things will need to be done through backlog - if seconds_since_last_update > 86400: - logger.log( - 'The last known successful update was more than 24 hours ago, only trying to fetch the last 24 hours!', - logger.DEBUG) - seconds_since_last_update = 86400 - - self.search_params = None # BTN cache does not use search params - return {'entries': self.provider.search(search_params=self.search_params, age=seconds_since_last_update)} - provider = BTNProvider() From 1dbf12cf428372a51ada332a68445812e6db55ff Mon Sep 17 00:00:00 2001 From: Labrys of Knossos Date: Sat, 25 Jun 2016 11:51:09 -0400 Subject: [PATCH 100/134] Remove fanart lib (#713) API has changed and lib is no longer maintained --- lib/fanart/__init__.py | 111 ---- lib/fanart/core.py | 35 -- lib/fanart/errors.py | 15 - lib/fanart/immutable.py | 46 -- lib/fanart/items.py | 68 -- lib/fanart/movie.py | 103 --- lib/fanart/music.py | 80 --- lib/fanart/tests/__init__.py | 3 - lib/fanart/tests/json/wilfred.json | 196 ------ lib/fanart/tests/response/50x50.png | Bin 180 -> 0 bytes lib/fanart/tests/response/movie_thg.json | 174 ------ lib/fanart/tests/response/music_a7f.json | 171 ----- lib/fanart/tests/response/tv_239761.json | 196 ------ lib/fanart/tests/response/tv_79349.json | 756 ----------------------- lib/fanart/tests/test_core.py | 23 - lib/fanart/tests/test_immutable.py | 49 -- lib/fanart/tests/test_items.py | 27 - lib/fanart/tests/test_movie.py | 21 - lib/fanart/tests/test_music.py | 22 - lib/fanart/tests/test_tv.py | 46 -- lib/fanart/tv.py | 108 ---- sickbeard/metadata/generic.py | 55 +- 22 files changed, 7 insertions(+), 2298 deletions(-) delete mode 100644 lib/fanart/__init__.py delete mode 100644 lib/fanart/core.py delete mode 100644 lib/fanart/errors.py delete mode 100644 lib/fanart/immutable.py delete mode 100644 lib/fanart/items.py delete mode 100644 lib/fanart/movie.py delete mode 100644 lib/fanart/music.py delete mode 100644 lib/fanart/tests/__init__.py delete mode 100644 lib/fanart/tests/json/wilfred.json delete mode 100644 lib/fanart/tests/response/50x50.png delete mode 100644 lib/fanart/tests/response/movie_thg.json delete mode 100644 lib/fanart/tests/response/music_a7f.json delete mode 100644 lib/fanart/tests/response/tv_239761.json delete mode 100644 lib/fanart/tests/response/tv_79349.json delete mode 100644 lib/fanart/tests/test_core.py delete mode 100644 lib/fanart/tests/test_immutable.py delete mode 100644 lib/fanart/tests/test_items.py delete mode 100644 lib/fanart/tests/test_movie.py delete mode 100644 lib/fanart/tests/test_music.py delete mode 100644 lib/fanart/tests/test_tv.py delete mode 100644 lib/fanart/tv.py diff --git a/lib/fanart/__init__.py b/lib/fanart/__init__.py deleted file mode 100644 index 8acc7f5cd6..0000000000 --- a/lib/fanart/__init__.py +++ /dev/null @@ -1,111 +0,0 @@ -__author__ = 'Andrea De Marco <24erre@gmail.com>' -__version__ = '1.4.0' -__classifiers__ = [ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Topic :: Internet :: WWW/HTTP', - 'Topic :: Software Development :: 
Libraries', -] -__copyright__ = "2012, %s " % __author__ -__license__ = """ - Copyright %s. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either expressed or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" % __copyright__ - -__docformat__ = 'restructuredtext en' - -__doc__ = """ -:abstract: Python interface to fanart.tv API -:version: %s -:author: %s -:contact: http://z4r.github.com/ -:date: 2012-04-04 -:copyright: %s -""" % (__version__, __author__, __license__) - - -def values(obj): - return [v for k, v in obj.__dict__.iteritems() if not k.startswith('_')] - -BASEURL = 'http://webservice.fanart.tv/v3/%s/%s?api_key=%s' - -class FORMAT(object): - JSON = 'JSON' - XML = 'XML' - PHP = 'PHP' - - -class WS(object): - MUSIC = 'music' - MOVIE = 'movies' - TV = 'tv' - - -class TYPE(object): - ALL = 'all' - - class TV(object): - LOGO = 'clearlogo' - CHARACTER = 'characterart' - BACKGROUND = 'showbackground' - HDLOGO = 'hdtvlogo' - HDART = 'hdclearart' - ART = 'clearart' - THUMB = 'tvthumb' - POSTER = 'tvposter' - BANNER = 'tvbanner' - SEASONTHUMB = 'seasonthumb' - SEASONPOSTER = 'seasonposter' - SEASONBANNER = 'seasonbanner' - - class MUSIC(object): - DISC = 'cdart' - LOGO = 'musiclogo' - BACKGROUND = 'artistbackground' - COVER = 'albumcover' - THUMB = 'artistthumb' - - class MOVIE(object): - ART = 'movieart' - LOGO = 'movielogo' - DISC = 'moviedisc' - POSTER = 'movieposter' - BACKGROUND = 'moviebackground' - HDLOGO = 'hdmovielogo' - HDART = 'hdmovieclearart' - BANNER = 'moviebanner' - THUMB = 'moviethumb' - - -class SORT(object): - POPULAR = 1 - NEWEST = 2 - OLDEST = 3 - - -class LIMIT(object): - ONE = 1 - ALL = 2 - -FORMAT_LIST = values(FORMAT) -WS_LIST = values(WS) -TYPE_LIST = values(TYPE.MUSIC) + values(TYPE.TV) + values(TYPE.MOVIE) + [TYPE.ALL] -MUSIC_TYPE_LIST = values(TYPE.MUSIC) + [TYPE.ALL] -TV_TYPE_LIST = values(TYPE.TV) + [TYPE.ALL] -MOVIE_TYPE_LIST = values(TYPE.MOVIE) + [TYPE.ALL] -SORT_LIST = values(SORT) -LIMIT_LIST = values(LIMIT) diff --git a/lib/fanart/core.py b/lib/fanart/core.py deleted file mode 100644 index 6b3af96d46..0000000000 --- a/lib/fanart/core.py +++ /dev/null @@ -1,35 +0,0 @@ -import requests -import fanart -from fanart.errors import RequestFanartError, ResponseFanartError - - -class Request(object): - def __init__(self, apikey, id, ws, type=None, sort=None, limit=None): - self._apikey = apikey - self._id = id - self._ws = ws - self._type = type or fanart.TYPE.ALL - self._sort = sort or fanart.SORT.POPULAR - self._limit = limit or fanart.LIMIT.ALL - self.validate() - self._response = None - - def validate(self): - for attribute_name in ('ws', 'type', 'sort', 'limit'): - attribute = getattr(self, '_' + attribute_name) - choices = getattr(fanart, attribute_name.upper() + '_LIST') - if attribute not in choices: - raise RequestFanartError('Not allowed {0}: {1} [{2}]'.format(attribute_name, attribute, ', '.join(choices))) - - def __str__(self): - return fanart.BASEURL % (self._ws, self._id, self._apikey) - - def response(self): - try: - response = requests.get(str(self)) - rjson = response.json() - if not isinstance(rjson, dict): - raise 
Exception(response.text) - return rjson - except Exception as e: - raise ResponseFanartError(str(e)) diff --git a/lib/fanart/errors.py b/lib/fanart/errors.py deleted file mode 100644 index 95a71e35ed..0000000000 --- a/lib/fanart/errors.py +++ /dev/null @@ -1,15 +0,0 @@ -class FanartError(Exception): - def __str__(self): - return ', '.join(map(str, self.args)) - - def __repr__(self): - name = self.__class__.__name__ - return '%s%r' % (name, self.args) - - -class ResponseFanartError(FanartError): - pass - - -class RequestFanartError(FanartError): - pass diff --git a/lib/fanart/immutable.py b/lib/fanart/immutable.py deleted file mode 100644 index 170de37086..0000000000 --- a/lib/fanart/immutable.py +++ /dev/null @@ -1,46 +0,0 @@ -class Immutable(object): - _mutable = False - - def __setattr__(self, name, value): - if self._mutable or name == '_mutable': - super(Immutable, self).__setattr__(name, value) - else: - raise TypeError("Can't modify immutable instance") - - def __delattr__(self, name): - if self._mutable: - super(Immutable, self).__delattr__(name) - else: - raise TypeError("Can't modify immutable instance") - - def __eq__(self, other): - return hash(self) == hash(other) - - def __hash__(self): - return hash(repr(self)) - - def __repr__(self): - return '%s(%s)' % ( - self.__class__.__name__, - ', '.join(['{0}={1}'.format(k, repr(v)) for k, v in self]) - ) - - def __iter__(self): - l = self.__dict__.keys() - l.sort() - for k in l: - if not k.startswith('_'): - yield k, getattr(self, k) - - @staticmethod - def mutablemethod(f): - def func(self, *args, **kwargs): - if isinstance(self, Immutable): - old_mutable = self._mutable - self._mutable = True - res = f(self, *args, **kwargs) - self._mutable = old_mutable - else: - res = f(self, *args, **kwargs) - return res - return func diff --git a/lib/fanart/items.py b/lib/fanart/items.py deleted file mode 100644 index 778e1a1b2b..0000000000 --- a/lib/fanart/items.py +++ /dev/null @@ -1,68 +0,0 @@ -import json -import os -import requests -from fanart.core import Request -from fanart.immutable import Immutable - - -class LeafItem(Immutable): - KEY = NotImplemented - - @Immutable.mutablemethod - def __init__(self, id, url, likes): - self.id = int(id) - self.url = url - self.likes = int(likes) - self._content = None - - @classmethod - def from_dict(cls, resource): - return cls(**dict([(str(k), v) for k, v in resource.iteritems()])) - - @classmethod - def extract(cls, resource): - return [cls.from_dict(i) for i in resource.get(cls.KEY, {})] - - @Immutable.mutablemethod - def content(self): - if not self._content: - self._content = requests.get(self.url).content - return self._content - - def __str__(self): - return self.url - - -class ResourceItem(Immutable): - WS = NotImplemented - request_cls = Request - - @classmethod - def from_dict(cls, map): - raise NotImplementedError - - @classmethod - def get(cls, id): - map = cls.request_cls( - apikey=os.environ.get('FANART_APIKEY'), - id=id, - ws=cls.WS - ).response() - return cls.from_dict(map) - - def json(self, **kw): - return json.dumps( - self, - default=lambda o: dict([(k, v) for k, v in o.__dict__.items() if not k.startswith('_')]), - **kw - ) - - -class CollectableItem(Immutable): - @classmethod - def from_dict(cls, key, map): - raise NotImplementedError - - @classmethod - def collection_from_dict(cls, map): - return [cls.from_dict(k, v) for k, v in map.iteritems()] diff --git a/lib/fanart/movie.py b/lib/fanart/movie.py deleted file mode 100644 index c69e860b1e..0000000000 --- 
a/lib/fanart/movie.py +++ /dev/null @@ -1,103 +0,0 @@ -import fanart -from fanart.items import LeafItem, Immutable, ResourceItem -__all__ = ( - 'ArtItem', - 'DiscItem', - 'LogoItem', - 'PosterItem', - 'BackgroundItem', - 'HdLogoItem', - 'HdArtItem', - 'BannerItem', - 'ThumbItem', - 'Movie', -) - - -class MovieItem(LeafItem): - - @Immutable.mutablemethod - def __init__(self, id, url, likes, lang): - super(MovieItem, self).__init__(id, url, likes) - self.lang = lang - - -class DiscItem(MovieItem): - KEY = fanart.TYPE.MOVIE.DISC - - @Immutable.mutablemethod - def __init__(self, id, url, likes, lang, disc, disc_type): - super(DiscItem, self).__init__(id, url, likes, lang) - self.disc = int(disc) - self.disc_type = disc_type - - -class ArtItem(MovieItem): - KEY = fanart.TYPE.MOVIE.ART - - -class LogoItem(MovieItem): - KEY = fanart.TYPE.MOVIE.LOGO - - -class PosterItem(MovieItem): - KEY = fanart.TYPE.MOVIE.POSTER - - -class BackgroundItem(MovieItem): - KEY = fanart.TYPE.MOVIE.BACKGROUND - - -class HdLogoItem(MovieItem): - KEY = fanart.TYPE.MOVIE.HDLOGO - - -class HdArtItem(MovieItem): - KEY = fanart.TYPE.MOVIE.HDART - - -class BannerItem(MovieItem): - KEY = fanart.TYPE.MOVIE.BANNER - - -class ThumbItem(MovieItem): - KEY = fanart.TYPE.MOVIE.THUMB - - -class Movie(ResourceItem): - WS = fanart.WS.MOVIE - - @Immutable.mutablemethod - def __init__(self, name, imdbid, tmdbid, arts, logos, discs, posters, backgrounds, hdlogos, hdarts, - banners, thumbs): - self.name = name - self.imdbid = imdbid - self.tmdbid = tmdbid - self.arts = arts - self.posters = posters - self.logos = logos - self.discs = discs - self.backgrounds = backgrounds - self.hdlogos = hdlogos - self.hdarts = hdarts - self.banners = banners - self.thumbs = thumbs - - @classmethod - def from_dict(cls, resource): - assert len(resource) == 1, 'Bad Format Map' - name, resource = resource.items()[0] - return cls( - name=name, - imdbid=resource['imdb_id'], - tmdbid=resource['tmdb_id'], - arts=ArtItem.extract(resource), - logos=LogoItem.extract(resource), - discs=DiscItem.extract(resource), - posters=PosterItem.extract(resource), - backgrounds=BackgroundItem.extract(resource), - hdlogos=HdLogoItem.extract(resource), - hdarts=HdArtItem.extract(resource), - banners=BannerItem.extract(resource), - thumbs=ThumbItem.extract(resource), - ) diff --git a/lib/fanart/music.py b/lib/fanart/music.py deleted file mode 100644 index 9f10309373..0000000000 --- a/lib/fanart/music.py +++ /dev/null @@ -1,80 +0,0 @@ -from fanart.items import Immutable, LeafItem, ResourceItem, CollectableItem -import fanart -__all__ = ( - 'BackgroundItem', - 'CoverItem', - 'LogoItem', - 'ThumbItem', - 'DiscItem', - 'Artist', - 'Album', -) - - -class BackgroundItem(LeafItem): - KEY = fanart.TYPE.MUSIC.BACKGROUND - - -class CoverItem(LeafItem): - KEY = fanart.TYPE.MUSIC.COVER - - -class LogoItem(LeafItem): - KEY = fanart.TYPE.MUSIC.LOGO - - -class ThumbItem(LeafItem): - KEY = fanart.TYPE.MUSIC.THUMB - - -class DiscItem(LeafItem): - KEY = fanart.TYPE.MUSIC.DISC - - @Immutable.mutablemethod - def __init__(self, id, url, likes, disc, size): - super(DiscItem, self).__init__(id, url, likes) - self.disc = int(disc) - self.size = int(size) - - -class Artist(ResourceItem): - WS = fanart.WS.MUSIC - - @Immutable.mutablemethod - def __init__(self, name, mbid, albums, backgrounds, logos, thumbs): - self.name = name - self.mbid = mbid - self.albums = albums - self.backgrounds = backgrounds - self.logos = logos - self.thumbs = thumbs - - @classmethod - def from_dict(cls, resource): - assert 
len(resource) == 1, 'Bad Format Map' - name, resource = resource.items()[0] - return cls( - name=name, - mbid=resource['mbid_id'], - albums=Album.collection_from_dict(resource.get('albums', {})), - backgrounds=BackgroundItem.extract(resource), - thumbs=ThumbItem.extract(resource), - logos=LogoItem.extract(resource), - ) - - -class Album(CollectableItem): - - @Immutable.mutablemethod - def __init__(self, mbid, covers, arts): - self.mbid = mbid - self.covers = covers - self.arts = arts - - @classmethod - def from_dict(cls, key, resource): - return cls( - mbid=key, - covers=CoverItem.extract(resource), - arts=DiscItem.extract(resource), - ) diff --git a/lib/fanart/tests/__init__.py b/lib/fanart/tests/__init__.py deleted file mode 100644 index 957cbe388c..0000000000 --- a/lib/fanart/tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -import os - -LOCALDIR = os.path.dirname(__file__) diff --git a/lib/fanart/tests/json/wilfred.json b/lib/fanart/tests/json/wilfred.json deleted file mode 100644 index 2065f9cfe0..0000000000 --- a/lib/fanart/tests/json/wilfred.json +++ /dev/null @@ -1,196 +0,0 @@ -{ - "logos": [ - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/clearlogo/wilfred-us-4e04b6495dfd3.png", - "likes": 2, - "id": 11977 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/clearlogo/wilfred-us-517ac36e39f67.png", - "likes": 1, - "id": 28249 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/clearlogo/wilfred-us-51f557082cfde.png", - "likes": 0, - "id": 31817 - } - ], - "arts": [ - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/clearart/wilfred-us-4e05f10e87711.png", - "likes": 2, - "id": 11987 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/clearart/wilfred-us-4e2f151d5ed62.png", - "likes": 1, - "id": 12470 - } - ], - "name": "Wilfred (US)", - "hdarts": [ - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/hdclearart/wilfred-us-505f94ed0ba13.png", - "likes": 1, - "id": 21112 - }, - { - "lang": "he", - "url": "http://assets.fanart.tv/fanart/tv/239761/hdclearart/wilfred-us-52403264aa3ec.png", - "likes": 1, - "id": 33751 - } - ], - "backgrounds": [ - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-5034dbd49115e.jpg", - "id": 19965, - "season": 0, - "likes": 0 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-50b0c92db6973.jpg", - "id": 23166, - "season": 0, - "likes": 0 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-50b0c92dbb46b.jpg", - "id": 23167, - "season": 0, - "likes": 0 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-50b0c92dbb9d1.jpg", - "id": 23168, - "season": 0, - "likes": 0 - } - ], - "thumbs": [ - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/tvthumb/wilfred-us-501cf526174fe.jpg", - "likes": 1, - "id": 19596 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/tvthumb/wilfred-us-51bfb4a105904.jpg", - "likes": 0, - "id": 30060 - } - ], - "characters": [], - "posters": [ - { - "lang": "he", - "url": "http://assets.fanart.tv/fanart/tv/239761/tvposter/wilfred-us-525d893230d7c.jpg", - "likes": 1, - "id": 34584 - } - ], - "seasons": [ - { - "lang": "he", - "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-52403782bab55.jpg", - "id": 33752, - "season": 1, - "likes": 1 - }, - { - 
"lang": "he", - "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-5240379335232.jpg", - "id": 33753, - "season": 2, - "likes": 1 - }, - { - "lang": "he", - "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-524037bc83c7d.jpg", - "id": 33754, - "season": 3, - "likes": 1 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-501bb0a8e60f9.jpg", - "id": 19586, - "season": 1, - "likes": 0 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-501bb0b4bf229.jpg", - "id": 19587, - "season": 2, - "likes": 0 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-501bb144e6a46.jpg", - "id": 19588, - "season": 0, - "likes": 0 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-51c953105ef77.jpg", - "id": 30309, - "season": 3, - "likes": 0 - } - ], - "banners": [ - { - "lang": "he", - "url": "http://assets.fanart.tv/fanart/tv/239761/tvbanner/wilfred-us-52403a7185070.jpg", - "likes": 1, - "id": 33755 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/tvbanner/wilfred-us-5265193db51f7.jpg", - "likes": 0, - "id": 34716 - } - ], - "hdlogos": [ - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-505f373be58e6.png", - "likes": 1, - "id": 21101 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-517ac360def17.png", - "likes": 1, - "id": 28248 - }, - { - "lang": "he", - "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-52402df7ed945.png", - "likes": 1, - "id": 33750 - }, - { - "lang": "en", - "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-51f556fb4abd3.png", - "likes": 0, - "id": 31816 - } - ], - "tvdbid": "239761" -} diff --git a/lib/fanart/tests/response/50x50.png b/lib/fanart/tests/response/50x50.png deleted file mode 100644 index 0ba41614ca1b571daa258e917d1a1e6f5143790c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 180 zcmeAS@N?(olHy`uVBq!ia0vp^Mj*_>3?$zOPHh5G(g8jpu4m4inKo_OoH=tAFJ8QT z`}U(pk8a$!an-6-`}Xa7TsYkfs4T?O#WBR9H#tFqb#X&j!^tBj89g+2uShsyFhhz# zll57Gk1ZS9o0B}J&YgK-wB`&K+nPl@jBXM}I%k%#t&!s4J0`Jg;|fbl18YNzr3pI{ eM0%LsFfhcQVxRr2jC~%^d Date: Mon, 27 Jun 2016 12:23:09 -0400 Subject: [PATCH 101/134] Libs (#699) * Update concurrent.futures to 3.0.5 * Update requests to 2.10.0 * Update rarfile to 2.8 * Update pytz to 2016.4 * Update enzyme to 0.4.2 #9572bea606a6145dad153cd712653d6cf10ef18e * Update dogpile.cache to 0.6.1 Note: removed dogpile.core 0.4.1 as it is not used by dogpile.cache since 0.6.0 * Update chardet to 2.3.0 #727fc85fd35d710fc4444b8057c42a9e4ff44d80 * Update xmltodict to 0.10.2 * Update profilehooks to 1.8.1 * Update contextlib2 to 0.5.3 * Update validators to 0.10.3 * Update backports-abc to 0.4 Update backports.ssl-match-hostname to 3.5.0.1 * Add singledispatch 3.4.0.3 (Tornado 4.3 dependency) * Update tornado to 4.3 * Update stevedore to 1.15.0 * Update SQLAlchemy to 1.0.13 * Update oauth2 to 1.9.0 * Update httplib2 to 0.9.2 * Update pyasn1 to 0.1.9 * Update Mako to 1.0.4 * Update MarkupSafe to 0.23 * Update jsonrpclib to 0.1.7 * Update enum34 to 1.1.6 * Remove deprecated paraameter 'require_tld' * Update html5lib to 1.0b8/0.9999999 * Update pylockfile to 0.12.2 * Update ndg-httpsclient to 0.4.1 --- lib/backports/ssl_match_hostname/LICENSE.txt | 51 - 
lib/backports/ssl_match_hostname/README.txt | 52 - lib/backports/ssl_match_hostname/__init__.py | 58 +- lib/backports_abc.py | 202 ++ lib/chardet/charsetprober.py | 23 +- lib/chardet/enums.py | 15 +- lib/chardet/jpcntx.py | 2 +- lib/chardet/mbcssm.py | 2 +- lib/chardet/sbcharsetprober.py | 2 +- lib/chardet/sbcsgroupprober.py | 6 +- lib/chardet/universaldetector.py | 20 +- lib/concurrent/futures/__init__.py | 2 +- lib/concurrent/futures/_base.py | 54 +- lib/concurrent/futures/_compat.py | 111 - lib/concurrent/futures/process.py | 18 +- lib/concurrent/futures/thread.py | 18 +- lib/contextlib2.py | 22 +- lib/dogpile/__init__.py | 10 +- lib/dogpile/cache/__init__.py | 5 +- lib/dogpile/cache/api.py | 16 +- lib/dogpile/cache/backends/file.py | 6 +- lib/dogpile/cache/backends/memcached.py | 29 +- lib/dogpile/cache/backends/memory.py | 4 +- lib/dogpile/cache/backends/null.py | 6 +- lib/dogpile/cache/backends/redis.py | 5 +- lib/dogpile/cache/proxy.py | 4 +- lib/dogpile/cache/region.py | 40 +- lib/dogpile/cache/util.py | 119 +- lib/dogpile/core.py | 17 + lib/dogpile/core/__init__.py | 11 - lib/dogpile/core/legacy.py | 154 -- lib/dogpile/core/util.py | 8 - lib/dogpile/{core/dogpile.py => lock.py} | 24 +- lib/dogpile/util/__init__.py | 4 + lib/dogpile/{cache => util}/compat.py | 0 lib/dogpile/util/langhelpers.py | 120 + lib/dogpile/{core => util}/nameregistry.py | 3 +- lib/dogpile/{core => util}/readwrite_lock.py | 22 +- lib/{enum34 => enum}/LICENSE | 0 lib/{enum34 => enum}/README | 1 + lib/{enum34 => enum}/__init__.py | 117 +- lib/enum34/doc/enum.rst | 725 ------ lib/enum34/enum.py | 790 ------ lib/enum34/test_enum.py | 1690 ------------- lib/enzyme/__init__.py | 5 +- lib/html5lib/__init__.py | 4 +- lib/html5lib/constants.py | 388 ++- lib/html5lib/filters/lint.py | 41 +- lib/html5lib/html5parser.py | 19 +- lib/html5lib/inputstream.py | 35 +- lib/html5lib/sanitizer.py | 43 +- lib/html5lib/serializer/htmlserializer.py | 19 +- lib/html5lib/treebuilders/dom.py | 2 +- lib/html5lib/treewalkers/__init__.py | 90 + lib/html5lib/treewalkers/_base.py | 8 +- lib/html5lib/treewalkers/dom.py | 3 - lib/html5lib/treewalkers/etree.py | 6 +- lib/html5lib/treewalkers/lxmletree.py | 11 +- lib/html5lib/utils.py | 23 +- lib/httplib2/__init__.py | 902 +++++-- lib/httplib2/cacerts.txt | 2183 +++++++++++++++++ lib/httplib2/iri2uri.py | 58 +- lib/httplib2/socks.py | 438 ++++ lib/jsonrpclib/SimpleJSONRPCServer.py | 47 +- lib/jsonrpclib/__init__.py | 9 +- lib/jsonrpclib/config.py | 8 +- lib/jsonrpclib/history.py | 6 +- lib/jsonrpclib/jsonclass.py | 38 +- lib/jsonrpclib/jsonrpc.py | 170 +- lib/lockfile/__init__.py | 24 +- lib/lockfile/linklockfile.py | 4 +- lib/lockfile/mkdirlockfile.py | 9 +- lib/lockfile/pidlockfile.py | 15 +- lib/lockfile/sqlitelockfile.py | 7 +- lib/lockfile/symlinklockfile.py | 19 +- lib/mako/__init__.py | 4 +- lib/mako/_ast_util.py | 2 +- lib/mako/ast.py | 2 +- lib/mako/cache.py | 2 +- lib/mako/cmd.py | 2 +- lib/mako/codegen.py | 2 +- lib/mako/compat.py | 1 + lib/mako/exceptions.py | 2 +- lib/mako/ext/autohandler.py | 2 +- lib/mako/ext/babelplugin.py | 2 +- lib/mako/ext/preprocessors.py | 2 +- lib/mako/ext/pygmentplugin.py | 2 +- lib/mako/ext/turbogears.py | 2 +- lib/mako/filters.py | 2 +- lib/mako/lexer.py | 28 +- lib/mako/lookup.py | 2 +- lib/mako/parsetree.py | 2 +- lib/mako/pygen.py | 2 +- lib/mako/pyparser.py | 2 +- lib/mako/runtime.py | 2 +- lib/mako/template.py | 10 +- lib/mako/util.py | 2 +- lib/markupsafe/_speedups.pyd | Bin 0 -> 8704 bytes lib/ndg/__init__.py | 30 +- lib/ndg/httpsclient/LICENSE 
| 26 + lib/ndg/httpsclient/https.py | 20 +- lib/ndg/httpsclient/ssl_context_util.py | 9 +- lib/ndg/httpsclient/ssl_peer_verification.py | 24 +- lib/ndg/httpsclient/ssl_socket.py | 31 +- lib/ndg/httpsclient/subj_alt_name.py | 2 +- lib/ndg/httpsclient/test/pki/ca/08bd99c7.0 | 20 + lib/ndg/httpsclient/test/pki/ca/ade0138a.0 | 20 + lib/ndg/httpsclient/test/pki/localhost.crt | 92 +- lib/ndg/httpsclient/test/pki/localhost.key | 38 +- lib/ndg/httpsclient/test/test_https.py | 23 +- lib/ndg/httpsclient/test/test_urllib2.py | 18 +- lib/ndg/httpsclient/test/test_utils.py | 13 +- lib/ndg/httpsclient/urllib2_build_opener.py | 23 +- lib/ndg/httpsclient/utils.py | 79 +- lib/oauth2/__init__.py | 530 ++-- lib/oauth2/_compat.py | 48 + lib/oauth2/_version.py | 19 + lib/oauth2/clients/__init__.py | 0 lib/oauth2/clients/imap.py | 40 + lib/oauth2/clients/smtp.py | 41 + lib/profilehooks.py | 289 ++- lib/pyasn1/LICENSE | 24 - lib/pyasn1/__init__.py | 4 +- lib/pyasn1/codec/ber/decoder.py | 153 +- lib/pyasn1/codec/ber/encoder.py | 168 +- lib/pyasn1/codec/cer/decoder.py | 6 +- lib/pyasn1/codec/cer/encoder.py | 61 +- lib/pyasn1/codec/der/decoder.py | 4 +- lib/pyasn1/codec/der/encoder.py | 12 +- lib/pyasn1/compat/binary.py | 10 + lib/pyasn1/compat/octets.py | 2 + lib/pyasn1/debug.py | 65 +- lib/pyasn1/type/base.py | 81 +- lib/pyasn1/type/char.py | 17 +- lib/pyasn1/type/namedtype.py | 31 +- lib/pyasn1/type/namedval.py | 12 + lib/pyasn1/type/tag.py | 10 +- lib/pyasn1/type/tagmap.py | 18 +- lib/pyasn1/type/univ.py | 232 +- lib/pyasn1/type/useful.py | 5 + lib/pytz/__init__.py | 8 +- lib/pytz/zoneinfo/America/Caracas | Bin 266 -> 261 bytes lib/pytz/zoneinfo/Asia/Almaty | Bin 936 -> 1017 bytes lib/pytz/zoneinfo/Asia/Anadyr | Bin 1197 -> 1197 bytes lib/pytz/zoneinfo/Asia/Aqtau | Bin 1142 -> 1003 bytes lib/pytz/zoneinfo/Asia/Aqtobe | Bin 1052 -> 1033 bytes lib/pytz/zoneinfo/Asia/Baku | Bin 1317 -> 1333 bytes lib/pytz/zoneinfo/Asia/Barnaul | Bin 1241 -> 1241 bytes lib/pytz/zoneinfo/Asia/Chita | Bin 1266 -> 1266 bytes lib/pytz/zoneinfo/Asia/Irkutsk | Bin 1259 -> 1259 bytes lib/pytz/zoneinfo/Asia/Kamchatka | Bin 1181 -> 1181 bytes lib/pytz/zoneinfo/Asia/Khandyga | Bin 1324 -> 1324 bytes lib/pytz/zoneinfo/Asia/Krasnoyarsk | Bin 1226 -> 1226 bytes lib/pytz/zoneinfo/Asia/Magadan | Bin 1227 -> 1241 bytes lib/pytz/zoneinfo/Asia/Novokuznetsk | Bin 1248 -> 1248 bytes lib/pytz/zoneinfo/Asia/Novosibirsk | Bin 1208 -> 1208 bytes lib/pytz/zoneinfo/Asia/Omsk | Bin 1226 -> 1226 bytes lib/pytz/zoneinfo/Asia/Oral | Bin 1100 -> 1017 bytes lib/pytz/zoneinfo/Asia/Qyzylorda | Bin 1082 -> 1033 bytes lib/pytz/zoneinfo/Asia/Sakhalin | Bin 1257 -> 1257 bytes lib/pytz/zoneinfo/Asia/Srednekolymsk | Bin 1237 -> 1237 bytes lib/pytz/zoneinfo/Asia/Tomsk | Bin 0 -> 1241 bytes lib/pytz/zoneinfo/Asia/Ust-Nera | Bin 1293 -> 1293 bytes lib/pytz/zoneinfo/Asia/Vladivostok | Bin 1227 -> 1227 bytes lib/pytz/zoneinfo/Asia/Yakutsk | Bin 1226 -> 1226 bytes lib/pytz/zoneinfo/Asia/Yekaterinburg | Bin 1334 -> 1302 bytes lib/pytz/zoneinfo/Asia/Yerevan | Bin 1277 -> 1275 bytes lib/pytz/zoneinfo/Europe/Astrakhan | Bin 1183 -> 1183 bytes lib/pytz/zoneinfo/Europe/Kaliningrad | Bin 1550 -> 1518 bytes lib/pytz/zoneinfo/Europe/Kirov | Bin 0 -> 1153 bytes lib/pytz/zoneinfo/Europe/Minsk | Bin 1368 -> 1368 bytes lib/pytz/zoneinfo/Europe/Moscow | Bin 1528 -> 1528 bytes lib/pytz/zoneinfo/Europe/Samara | Bin 1362 -> 1346 bytes lib/pytz/zoneinfo/Europe/Ulyanovsk | Bin 1267 -> 1267 bytes lib/pytz/zoneinfo/Europe/Volgograd | Bin 1325 -> 1309 bytes lib/pytz/zoneinfo/W-SU | Bin 1528 -> 1528 
bytes lib/pytz/zoneinfo/zone.tab | 8 +- lib/pytz/zoneinfo/zone1970.tab | 8 +- lib/{rarfile/__init__.py => rarfile.py} | 111 +- lib/rarfile/dumprar.py | 361 --- lib/requests/__init__.py | 12 +- lib/requests/adapters.py | 50 +- lib/requests/api.py | 6 +- lib/requests/auth.py | 1 + lib/requests/models.py | 10 +- lib/requests/packages/README.rst | 11 - lib/requests/packages/urllib3/__init__.py | 11 +- lib/requests/packages/urllib3/_collections.py | 2 +- lib/requests/packages/urllib3/connection.py | 64 +- .../packages/urllib3/connectionpool.py | 93 +- .../packages/urllib3/contrib/appengine.py | 12 +- .../packages/urllib3/contrib/ntlmpool.py | 20 +- .../packages/urllib3/contrib/pyopenssl.py | 64 +- lib/requests/packages/urllib3/exceptions.py | 8 + lib/requests/packages/urllib3/fields.py | 4 +- lib/requests/packages/urllib3/poolmanager.py | 17 +- lib/requests/packages/urllib3/response.py | 38 +- .../packages/urllib3/util/__init__.py | 2 + .../packages/urllib3/util/response.py | 2 +- lib/requests/packages/urllib3/util/retry.py | 14 +- lib/requests/packages/urllib3/util/ssl_.py | 11 +- lib/requests/sessions.py | 45 +- lib/requests/status_codes.py | 1 + lib/requests/structures.py | 4 +- lib/requests/utils.py | 30 +- lib/singledispatch.py | 219 ++ lib/singledispatch_helpers.py | 170 ++ lib/sqlalchemy/__init__.py | 13 +- lib/sqlalchemy/cextension/processors.c | 706 ------ lib/sqlalchemy/cextension/resultproxy.c | 718 ------ lib/sqlalchemy/cextension/utils.c | 225 -- lib/sqlalchemy/connectors/__init__.py | 3 +- lib/sqlalchemy/connectors/mxodbc.py | 9 +- lib/sqlalchemy/connectors/mysqldb.py | 144 -- lib/sqlalchemy/connectors/pyodbc.py | 47 +- lib/sqlalchemy/connectors/zxJDBC.py | 13 +- lib/sqlalchemy/cprocessors.pyd | Bin 0 -> 11776 bytes lib/sqlalchemy/cresultproxy.pyd | Bin 0 -> 13824 bytes lib/sqlalchemy/cutils.pyd | Bin 0 -> 8192 bytes lib/sqlalchemy/databases/__init__.py | 7 +- lib/sqlalchemy/dialects/__init__.py | 7 +- lib/sqlalchemy/dialects/drizzle/__init__.py | 22 - lib/sqlalchemy/dialects/drizzle/base.py | 498 ---- lib/sqlalchemy/dialects/drizzle/mysqldb.py | 48 - lib/sqlalchemy/dialects/firebird/__init__.py | 3 +- lib/sqlalchemy/dialects/firebird/base.py | 168 +- lib/sqlalchemy/dialects/firebird/fdb.py | 23 +- .../dialects/firebird/kinterbasdb.py | 31 +- lib/sqlalchemy/dialects/mssql/__init__.py | 5 +- lib/sqlalchemy/dialects/mssql/adodbapi.py | 7 +- lib/sqlalchemy/dialects/mssql/base.py | 949 +++++-- .../dialects/mssql/information_schema.py | 144 +- lib/sqlalchemy/dialects/mssql/mxodbc.py | 11 +- lib/sqlalchemy/dialects/mssql/pymssql.py | 22 +- lib/sqlalchemy/dialects/mssql/pyodbc.py | 178 +- lib/sqlalchemy/dialects/mssql/zxjdbc.py | 18 +- lib/sqlalchemy/dialects/mysql/__init__.py | 17 +- lib/sqlalchemy/dialects/mysql/base.py | 1094 ++++++--- lib/sqlalchemy/dialects/mysql/cymysql.py | 9 +- lib/sqlalchemy/dialects/mysql/gaerdbms.py | 28 +- .../dialects/mysql/mysqlconnector.py | 73 +- lib/sqlalchemy/dialects/mysql/mysqldb.py | 186 +- lib/sqlalchemy/dialects/mysql/oursql.py | 41 +- lib/sqlalchemy/dialects/mysql/pymysql.py | 26 +- lib/sqlalchemy/dialects/mysql/pyodbc.py | 19 +- lib/sqlalchemy/dialects/mysql/zxjdbc.py | 14 +- lib/sqlalchemy/dialects/oracle/__init__.py | 11 +- lib/sqlalchemy/dialects/oracle/base.py | 745 ++++-- lib/sqlalchemy/dialects/oracle/cx_oracle.py | 352 +-- lib/sqlalchemy/dialects/oracle/zxjdbc.py | 51 +- lib/sqlalchemy/dialects/postgres.py | 10 +- .../dialects/postgresql/__init__.py | 16 +- lib/sqlalchemy/dialects/postgresql/base.py | 1293 +++++++--- 
.../dialects/postgresql/constraints.py | 57 +- lib/sqlalchemy/dialects/postgresql/hstore.py | 27 +- lib/sqlalchemy/dialects/postgresql/json.py | 207 +- lib/sqlalchemy/dialects/postgresql/pg8000.py | 182 +- .../dialects/postgresql/psycopg2.py | 400 ++- .../dialects/postgresql/psycopg2cffi.py | 61 + .../dialects/postgresql/pypostgresql.py | 23 +- lib/sqlalchemy/dialects/postgresql/ranges.py | 10 +- lib/sqlalchemy/dialects/postgresql/zxjdbc.py | 3 +- lib/sqlalchemy/dialects/sqlite/__init__.py | 5 +- lib/sqlalchemy/dialects/sqlite/base.py | 911 +++++-- lib/sqlalchemy/dialects/sqlite/pysqlcipher.py | 116 + lib/sqlalchemy/dialects/sqlite/pysqlite.py | 100 +- lib/sqlalchemy/dialects/sybase/__init__.py | 13 +- lib/sqlalchemy/dialects/sybase/base.py | 157 +- lib/sqlalchemy/dialects/sybase/mxodbc.py | 3 +- lib/sqlalchemy/dialects/sybase/pyodbc.py | 10 +- lib/sqlalchemy/dialects/sybase/pysybase.py | 14 +- .../dialects/type_migration_guidelines.txt | 145 -- lib/sqlalchemy/engine/__init__.py | 87 +- lib/sqlalchemy/engine/base.py | 840 +++++-- lib/sqlalchemy/engine/default.py | 413 ++-- lib/sqlalchemy/engine/interfaces.py | 337 ++- lib/sqlalchemy/engine/reflection.py | 354 ++- lib/sqlalchemy/engine/result.py | 615 +++-- lib/sqlalchemy/engine/strategies.py | 40 +- lib/sqlalchemy/engine/threadlocal.py | 18 +- lib/sqlalchemy/engine/url.py | 46 +- lib/sqlalchemy/engine/util.py | 6 +- lib/sqlalchemy/event/__init__.py | 3 +- lib/sqlalchemy/event/api.py | 69 +- lib/sqlalchemy/event/attr.py | 153 +- lib/sqlalchemy/event/base.py | 174 +- lib/sqlalchemy/event/legacy.py | 147 +- lib/sqlalchemy/event/registry.py | 83 +- lib/sqlalchemy/events.py | 283 ++- lib/sqlalchemy/exc.py | 79 +- lib/sqlalchemy/ext/__init__.py | 8 +- lib/sqlalchemy/ext/associationproxy.py | 91 +- lib/sqlalchemy/ext/automap.py | 549 +++-- lib/sqlalchemy/ext/baked.py | 523 ++++ lib/sqlalchemy/ext/compiler.py | 29 +- lib/sqlalchemy/ext/declarative/__init__.py | 1310 +--------- lib/sqlalchemy/ext/declarative/api.py | 238 +- lib/sqlalchemy/ext/declarative/base.py | 738 +++--- lib/sqlalchemy/ext/declarative/clsregistry.py | 53 +- lib/sqlalchemy/ext/horizontal_shard.py | 19 +- lib/sqlalchemy/ext/hybrid.py | 28 +- lib/sqlalchemy/ext/instrumentation.py | 15 +- lib/sqlalchemy/ext/mutable.py | 95 +- lib/sqlalchemy/ext/orderinglist.py | 34 +- lib/sqlalchemy/ext/serializer.py | 15 +- lib/sqlalchemy/inspection.py | 9 +- lib/sqlalchemy/interfaces.py | 10 +- lib/sqlalchemy/log.py | 13 +- lib/sqlalchemy/orm/__init__.py | 100 +- lib/sqlalchemy/orm/attributes.py | 511 ++-- lib/sqlalchemy/orm/base.py | 297 ++- lib/sqlalchemy/orm/collections.py | 166 +- lib/sqlalchemy/orm/dependency.py | 592 ++--- lib/sqlalchemy/orm/deprecated_interfaces.py | 165 +- lib/sqlalchemy/orm/descriptor_props.py | 161 +- lib/sqlalchemy/orm/dynamic.py | 86 +- lib/sqlalchemy/orm/evaluator.py | 42 +- lib/sqlalchemy/orm/events.py | 812 +++--- lib/sqlalchemy/orm/exc.py | 6 +- lib/sqlalchemy/orm/identity.py | 162 +- lib/sqlalchemy/orm/instrumentation.py | 89 +- lib/sqlalchemy/orm/interfaces.py | 277 ++- lib/sqlalchemy/orm/loading.py | 744 +++--- lib/sqlalchemy/orm/mapper.py | 810 +++--- lib/sqlalchemy/orm/path_registry.py | 64 +- lib/sqlalchemy/orm/persistence.py | 1123 +++++---- lib/sqlalchemy/orm/properties.py | 77 +- lib/sqlalchemy/orm/query.py | 1532 ++++++++---- lib/sqlalchemy/orm/relationships.py | 1349 +++++----- lib/sqlalchemy/orm/scoping.py | 26 +- lib/sqlalchemy/orm/session.py | 969 +++++--- lib/sqlalchemy/orm/state.py | 326 ++- lib/sqlalchemy/orm/strategies.py | 1047 ++++---- 
lib/sqlalchemy/orm/strategy_options.py | 316 ++- lib/sqlalchemy/orm/sync.py | 66 +- lib/sqlalchemy/orm/unitofwork.py | 142 +- lib/sqlalchemy/orm/util.py | 307 ++- lib/sqlalchemy/pool.py | 389 ++- lib/sqlalchemy/processors.py | 25 +- lib/sqlalchemy/schema.py | 10 +- lib/sqlalchemy/sql/__init__.py | 10 +- lib/sqlalchemy/sql/annotation.py | 25 +- lib/sqlalchemy/sql/base.py | 150 +- lib/sqlalchemy/sql/compiler.py | 1883 +++++++------- lib/sqlalchemy/sql/crud.py | 571 +++++ lib/sqlalchemy/sql/ddl.py | 427 +++- lib/sqlalchemy/sql/default_comparator.py | 530 ++-- lib/sqlalchemy/sql/dml.py | 374 +-- lib/sqlalchemy/sql/elements.py | 1201 ++++++--- lib/sqlalchemy/sql/expression.py | 62 +- lib/sqlalchemy/sql/functions.py | 138 +- lib/sqlalchemy/sql/naming.py | 97 +- lib/sqlalchemy/sql/operators.py | 96 +- lib/sqlalchemy/sql/schema.py | 1319 ++++++---- lib/sqlalchemy/sql/selectable.py | 1091 +++++--- lib/sqlalchemy/sql/sqltypes.py | 308 ++- lib/sqlalchemy/sql/type_api.py | 200 +- lib/sqlalchemy/sql/util.py | 285 ++- lib/sqlalchemy/sql/visitors.py | 30 +- lib/sqlalchemy/testing/__init__.py | 20 +- lib/sqlalchemy/testing/assertions.py | 252 +- lib/sqlalchemy/testing/assertsql.py | 529 ++-- lib/sqlalchemy/testing/config.py | 31 +- lib/sqlalchemy/testing/distutils_run.py | 11 + lib/sqlalchemy/testing/engines.py | 166 +- lib/sqlalchemy/testing/entities.py | 8 +- lib/sqlalchemy/testing/exclusions.py | 360 +-- lib/sqlalchemy/testing/fixtures.py | 62 +- lib/sqlalchemy/testing/mock.py | 12 +- lib/sqlalchemy/testing/pickleable.py | 7 +- lib/sqlalchemy/testing/plugin/bootstrap.py | 44 + lib/sqlalchemy/testing/plugin/noseplugin.py | 52 +- lib/sqlalchemy/testing/plugin/plugin_base.py | 363 ++- lib/sqlalchemy/testing/plugin/pytestplugin.py | 96 +- lib/sqlalchemy/testing/profiling.py | 240 +- lib/sqlalchemy/testing/provision.py | 308 +++ lib/sqlalchemy/testing/replay_fixture.py | 172 ++ lib/sqlalchemy/testing/requirements.py | 154 +- lib/sqlalchemy/testing/runner.py | 6 +- lib/sqlalchemy/testing/schema.py | 14 +- lib/sqlalchemy/testing/suite/__init__.py | 1 + lib/sqlalchemy/testing/suite/test_ddl.py | 14 +- lib/sqlalchemy/testing/suite/test_dialect.py | 41 + lib/sqlalchemy/testing/suite/test_insert.py | 123 +- .../testing/suite/test_reflection.py | 288 ++- lib/sqlalchemy/testing/suite/test_results.py | 86 +- lib/sqlalchemy/testing/suite/test_select.py | 120 +- lib/sqlalchemy/testing/suite/test_sequence.py | 32 +- lib/sqlalchemy/testing/suite/test_types.py | 148 +- .../testing/suite/test_update_delete.py | 12 +- lib/sqlalchemy/testing/util.py | 87 +- lib/sqlalchemy/testing/warnings.py | 46 +- lib/sqlalchemy/types.py | 21 +- lib/sqlalchemy/util/__init__.py | 20 +- lib/sqlalchemy/util/_collections.py | 254 +- lib/sqlalchemy/util/compat.py | 86 +- lib/sqlalchemy/util/deprecations.py | 11 +- lib/sqlalchemy/util/langhelpers.py | 323 ++- lib/sqlalchemy/util/queue.py | 4 +- lib/sqlalchemy/util/topological.py | 36 +- lib/stevedore/__init__.py | 14 +- lib/stevedore/tests/manager.py | 4 - lib/tornado/__init__.py | 4 +- lib/tornado/_locale_data.py | 94 + lib/tornado/auth.py | 138 +- lib/tornado/autoreload.py | 11 +- lib/tornado/concurrent.py | 28 +- lib/tornado/curl_httpclient.py | 38 +- lib/tornado/gen.py | 308 ++- lib/tornado/http1connection.py | 23 +- lib/tornado/httpclient.py | 7 +- lib/tornado/httpserver.py | 1 - lib/tornado/httputil.py | 46 +- lib/tornado/ioloop.py | 85 +- lib/tornado/iostream.py | 73 +- lib/tornado/locale.py | 105 +- lib/tornado/locks.py | 88 +- lib/tornado/log.py | 29 +- lib/tornado/netutil.py | 11 +- 
lib/tornado/options.py | 74 +- lib/tornado/platform/asyncio.py | 81 +- lib/tornado/platform/twisted.py | 116 +- lib/tornado/process.py | 9 +- lib/tornado/queues.py | 48 +- lib/tornado/simple_httpclient.py | 28 +- lib/tornado/speedups.c | 52 - lib/tornado/speedups.pyd | Bin 0 -> 6656 bytes lib/tornado/template.py | 208 +- lib/tornado/test/asyncio_test.py | 78 +- lib/tornado/test/auth_test.py | 134 +- lib/tornado/test/curl_httpclient_test.py | 1 + lib/tornado/test/gen_test.py | 150 +- lib/tornado/test/httpclient_test.py | 76 +- lib/tornado/test/httpserver_test.py | 21 +- lib/tornado/test/httputil_test.py | 22 +- lib/tornado/test/ioloop_test.py | 71 +- lib/tornado/test/iostream_test.py | 45 +- lib/tornado/test/locale_test.py | 29 +- lib/tornado/test/locks_test.py | 40 +- lib/tornado/test/log_test.py | 33 + lib/tornado/test/netutil_test.py | 13 +- lib/tornado/test/options_test.cfg | 4 +- lib/tornado/test/options_test.py | 42 + lib/tornado/test/queues_test.py | 32 +- lib/tornado/test/simple_httpclient_test.py | 102 +- lib/tornado/test/static/sample.xml | 23 + lib/tornado/test/static/sample.xml.bz2 | Bin 0 -> 285 bytes lib/tornado/test/static/sample.xml.gz | Bin 0 -> 264 bytes lib/tornado/test/tcpserver_test.py | 3 +- lib/tornado/test/template_test.py | 75 +- lib/tornado/test/testing_test.py | 52 +- lib/tornado/test/twisted_test.py | 47 +- lib/tornado/test/util.py | 26 + lib/tornado/test/web_test.py | 244 +- lib/tornado/test/websocket_test.py | 2 +- lib/tornado/testing.py | 74 +- lib/tornado/util.py | 25 +- lib/tornado/web.py | 161 +- lib/tornado/websocket.py | 21 +- lib/validators/__init__.py | 2 +- lib/validators/domain.py | 2 +- lib/validators/iban.py | 2 +- lib/validators/url.py | 79 +- lib/xmltodict.py | 106 +- sickbeard/providers/bitcannon.py | 2 +- sickbeard/providers/newznab.py | 6 +- 466 files changed, 36356 insertions(+), 24497 deletions(-) delete mode 100644 lib/backports/ssl_match_hostname/LICENSE.txt delete mode 100644 lib/backports/ssl_match_hostname/README.txt create mode 100644 lib/backports_abc.py delete mode 100644 lib/concurrent/futures/_compat.py create mode 100644 lib/dogpile/core.py delete mode 100644 lib/dogpile/core/__init__.py delete mode 100644 lib/dogpile/core/legacy.py delete mode 100644 lib/dogpile/core/util.py rename lib/dogpile/{core/dogpile.py => lock.py} (92%) create mode 100644 lib/dogpile/util/__init__.py rename lib/dogpile/{cache => util}/compat.py (100%) create mode 100644 lib/dogpile/util/langhelpers.py rename lib/dogpile/{core => util}/nameregistry.py (98%) rename lib/dogpile/{core => util}/readwrite_lock.py (95%) rename lib/{enum34 => enum}/LICENSE (100%) rename lib/{enum34 => enum}/README (81%) rename lib/{enum34 => enum}/__init__.py (89%) delete mode 100644 lib/enum34/doc/enum.rst delete mode 100644 lib/enum34/enum.py delete mode 100644 lib/enum34/test_enum.py create mode 100644 lib/httplib2/cacerts.txt create mode 100644 lib/httplib2/socks.py create mode 100644 lib/markupsafe/_speedups.pyd create mode 100644 lib/ndg/httpsclient/LICENSE create mode 100644 lib/ndg/httpsclient/test/pki/ca/08bd99c7.0 create mode 100644 lib/ndg/httpsclient/test/pki/ca/ade0138a.0 create mode 100644 lib/oauth2/_compat.py create mode 100644 lib/oauth2/_version.py create mode 100644 lib/oauth2/clients/__init__.py create mode 100644 lib/oauth2/clients/imap.py create mode 100644 lib/oauth2/clients/smtp.py delete mode 100644 lib/pyasn1/LICENSE create mode 100644 lib/pyasn1/compat/binary.py create mode 100644 lib/pytz/zoneinfo/Asia/Tomsk create mode 100644 
lib/pytz/zoneinfo/Europe/Kirov rename lib/{rarfile/__init__.py => rarfile.py} (96%) delete mode 100644 lib/rarfile/dumprar.py delete mode 100644 lib/requests/packages/README.rst create mode 100644 lib/singledispatch.py create mode 100644 lib/singledispatch_helpers.py delete mode 100644 lib/sqlalchemy/cextension/processors.c delete mode 100644 lib/sqlalchemy/cextension/resultproxy.c delete mode 100644 lib/sqlalchemy/cextension/utils.c delete mode 100644 lib/sqlalchemy/connectors/mysqldb.py create mode 100644 lib/sqlalchemy/cprocessors.pyd create mode 100644 lib/sqlalchemy/cresultproxy.pyd create mode 100644 lib/sqlalchemy/cutils.pyd delete mode 100644 lib/sqlalchemy/dialects/drizzle/__init__.py delete mode 100644 lib/sqlalchemy/dialects/drizzle/base.py delete mode 100644 lib/sqlalchemy/dialects/drizzle/mysqldb.py create mode 100644 lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py create mode 100644 lib/sqlalchemy/dialects/sqlite/pysqlcipher.py delete mode 100644 lib/sqlalchemy/dialects/type_migration_guidelines.txt create mode 100644 lib/sqlalchemy/ext/baked.py create mode 100644 lib/sqlalchemy/sql/crud.py create mode 100644 lib/sqlalchemy/testing/distutils_run.py create mode 100644 lib/sqlalchemy/testing/plugin/bootstrap.py create mode 100644 lib/sqlalchemy/testing/provision.py create mode 100644 lib/sqlalchemy/testing/replay_fixture.py create mode 100644 lib/sqlalchemy/testing/suite/test_dialect.py create mode 100644 lib/tornado/_locale_data.py delete mode 100644 lib/tornado/speedups.c create mode 100644 lib/tornado/speedups.pyd create mode 100644 lib/tornado/test/static/sample.xml create mode 100644 lib/tornado/test/static/sample.xml.bz2 create mode 100644 lib/tornado/test/static/sample.xml.gz diff --git a/lib/backports/ssl_match_hostname/LICENSE.txt b/lib/backports/ssl_match_hostname/LICENSE.txt deleted file mode 100644 index 58058f1bb9..0000000000 --- a/lib/backports/ssl_match_hostname/LICENSE.txt +++ /dev/null @@ -1,51 +0,0 @@ -Python License (Python-2.0) - -Python License, Version 2 (Python-2.0) - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python -alone or in any derivative version, provided, however, that PSF's -License Agreement and PSF's notice of copyright, i.e., "Copyright (c) -2001-2013 Python Software Foundation; All Rights Reserved" are retained in -Python alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. diff --git a/lib/backports/ssl_match_hostname/README.txt b/lib/backports/ssl_match_hostname/README.txt deleted file mode 100644 index de3910a645..0000000000 --- a/lib/backports/ssl_match_hostname/README.txt +++ /dev/null @@ -1,52 +0,0 @@ - -The ssl.match_hostname() function from Python 3.4 -================================================= - -The Secure Sockets layer is only actually *secure* -if you check the hostname in the certificate returned -by the server to which you are connecting, -and verify that it matches to hostname -that you are trying to reach. - -But the matching logic, defined in `RFC2818`_, -can be a bit tricky to implement on your own. -So the ``ssl`` package in the Standard Library of Python 3.2 -and greater now includes a ``match_hostname()`` function -for performing this check instead of requiring every application -to implement the check separately. - -This backport brings ``match_hostname()`` to users -of earlier versions of Python. -Simply make this distribution a dependency of your package, -and then use it like this:: - - from backports.ssl_match_hostname import match_hostname, CertificateError - ... - sslsock = ssl.wrap_socket(sock, ssl_version=ssl.PROTOCOL_SSLv3, - cert_reqs=ssl.CERT_REQUIRED, ca_certs=...) - try: - match_hostname(sslsock.getpeercert(), hostname) - except CertificateError, ce: - ... - -Note that the ``ssl`` module is only included in the Standard Library -for Python 2.6 and later; -users of Python 2.5 or earlier versions -will also need to install the ``ssl`` distribution -from the Python Package Index to use code like that shown above. - -Brandon Craig Rhodes is merely the packager of this distribution; -the actual code inside comes verbatim from Python 3.4. - -History -------- -* This function was introduced in python-3.2 -* It was updated for python-3.4a1 for a CVE - (backports-ssl_match_hostname-3.4.0.1) -* It was updated from RFC2818 to RFC 6125 compliance in order to fix another - security flaw for python-3.3.3 and python-3.4a5 - (backports-ssl_match_hostname-3.4.0.2) - - -.. 
_RFC2818: http://tools.ietf.org/html/rfc2818.html - diff --git a/lib/backports/ssl_match_hostname/__init__.py b/lib/backports/ssl_match_hostname/__init__.py index 34f248f336..06538ec689 100644 --- a/lib/backports/ssl_match_hostname/__init__.py +++ b/lib/backports/ssl_match_hostname/__init__.py @@ -1,8 +1,20 @@ """The match_hostname() function from Python 3.3.3, essential when using SSL.""" import re +import sys + +# ipaddress has been backported to 2.6+ in pypi. If it is installed on the +# system, use it to handle IPAddress ServerAltnames (this was added in +# python-3.5) otherwise only do DNS matching. This allows +# backports.ssl_match_hostname to continue to be used all the way back to +# python-2.4. +try: + import ipaddress +except ImportError: + ipaddress = None + +__version__ = '3.5.0.1' -__version__ = '3.4.0.2' class CertificateError(ValueError): pass @@ -61,6 +73,23 @@ def _dnsname_match(dn, hostname, max_wildcards=1): return pat.match(hostname) +def _to_unicode(obj): + if isinstance(obj, str) and sys.version_info < (3,): + obj = unicode(obj, encoding='ascii', errors='strict') + return obj + +def _ipaddress_match(ipname, host_ip): + """Exact matching of IP addresses. + + RFC 6125 explicitly doesn't define an algorithm for this + (section 1.7.2 - "Out of Scope"). + """ + # OpenSSL may add a trailing newline to a subjectAltName's IP address + # Divergence from upstream: ipaddress can't handle byte str + ip = ipaddress.ip_address(_to_unicode(ipname).rstrip()) + return ip == host_ip + + def match_hostname(cert, hostname): """Verify that *cert* (in decoded format as returned by SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 @@ -70,12 +99,35 @@ def match_hostname(cert, hostname): returns nothing. """ if not cert: - raise ValueError("empty or no certificate") + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + try: + # Divergence from upstream: ipaddress can't handle byte str + host_ip = ipaddress.ip_address(_to_unicode(hostname)) + except ValueError: + # Not an IP address (common case) + host_ip = None + except UnicodeError: + # Divergence from upstream: Have to deal with ipaddress not taking + # byte strings. addresses should be all ascii, so we consider it not + # an ipaddress in this case + host_ip = None + except AttributeError: + # Divergence from upstream: Make ipaddress library optional + if ipaddress is None: + host_ip = None + else: + raise dnsnames = [] san = cert.get('subjectAltName', ()) for key, value in san: if key == 'DNS': - if _dnsname_match(value, hostname): + if host_ip is None and _dnsname_match(value, hostname): + return + dnsnames.append(value) + elif key == 'IP Address': + if host_ip is not None and _ipaddress_match(value, host_ip): return dnsnames.append(value) if not dnsnames: diff --git a/lib/backports_abc.py b/lib/backports_abc.py new file mode 100644 index 0000000000..c48b7b0d54 --- /dev/null +++ b/lib/backports_abc.py @@ -0,0 +1,202 @@ +""" +Patch recently added ABCs into the standard lib module +``collections.abc`` (Py3) or ``collections`` (Py2). 
+ +Usage:: + + import backports_abc + backports_abc.patch() + +or:: + + try: + from collections.abc import Generator + except ImportError: + from backports_abc import Generator +""" + +try: + import collections.abc as _collections_abc +except ImportError: + import collections as _collections_abc + + +def mk_gen(): + from abc import abstractmethod + + required_methods = ( + '__iter__', '__next__' if hasattr(iter(()), '__next__') else 'next', + 'send', 'throw', 'close') + + class Generator(_collections_abc.Iterator): + __slots__ = () + + if '__next__' in required_methods: + def __next__(self): + return self.send(None) + else: + def next(self): + return self.send(None) + + @abstractmethod + def send(self, value): + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError('generator ignored GeneratorExit') + + @classmethod + def __subclasshook__(cls, C): + if cls is Generator: + mro = C.__mro__ + for method in required_methods: + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + generator = type((lambda: (yield))()) + Generator.register(generator) + return Generator + + +def mk_awaitable(): + from abc import abstractmethod, ABCMeta + + @abstractmethod + def __await__(self): + yield + + @classmethod + def __subclasshook__(cls, C): + if cls is Awaitable: + for B in C.__mro__: + if '__await__' in B.__dict__: + if B.__dict__['__await__']: + return True + break + return NotImplemented + + # calling metaclass directly as syntax differs in Py2/Py3 + Awaitable = ABCMeta('Awaitable', (), { + '__slots__': (), + '__await__': __await__, + '__subclasshook__': __subclasshook__, + }) + + return Awaitable + + +def mk_coroutine(): + from abc import abstractmethod + + class Coroutine(Awaitable): + __slots__ = () + + @abstractmethod + def send(self, value): + """Send a value into the coroutine. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the coroutine. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside coroutine. 
+ """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError('coroutine ignored GeneratorExit') + + @classmethod + def __subclasshook__(cls, C): + if cls is Coroutine: + mro = C.__mro__ + for method in ('__await__', 'send', 'throw', 'close'): + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + return Coroutine + + +### +# make all ABCs available in this module + +try: + Generator = _collections_abc.Generator +except AttributeError: + Generator = mk_gen() + +try: + Awaitable = _collections_abc.Awaitable +except AttributeError: + Awaitable = mk_awaitable() + +try: + Coroutine = _collections_abc.Coroutine +except AttributeError: + Coroutine = mk_coroutine() + +try: + from inspect import isawaitable +except ImportError: + def isawaitable(obj): + return isinstance(obj, Awaitable) + + +### +# allow patching the stdlib + +PATCHED = {} + + +def patch(patch_inspect=True): + """ + Main entry point for patching the ``collections.abc`` and ``inspect`` + standard library modules. + """ + PATCHED['collections.abc.Generator'] = _collections_abc.Generator = Generator + PATCHED['collections.abc.Coroutine'] = _collections_abc.Coroutine = Coroutine + PATCHED['collections.abc.Awaitable'] = _collections_abc.Awaitable = Awaitable + + if patch_inspect: + import inspect + PATCHED['inspect.isawaitable'] = inspect.isawaitable = isawaitable diff --git a/lib/chardet/charsetprober.py b/lib/chardet/charsetprober.py index bc5f9c241d..bc9f1e0706 100644 --- a/lib/chardet/charsetprober.py +++ b/lib/chardet/charsetprober.py @@ -28,7 +28,6 @@ import logging import re -from io import BytesIO from .enums import ProbingState @@ -79,16 +78,16 @@ def filter_international_words(buf): This filter applies to all scripts which do not use English characters. """ - filtered = BytesIO() + filtered = bytearray() # This regex expression filters out only words that have at-least one # international character. The word may include one marker character at # the end. - words = re.findall( - b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', buf) + words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', + buf) for word in words: - filtered.write(word[:-1]) + filtered.extend(word[:-1]) # If the last character in the word is a marker, replace it with a # space as markers shouldn't affect our analysis (they are used @@ -97,9 +96,9 @@ def filter_international_words(buf): last_char = word[-1:] if not last_char.isalpha() and last_char < b'\x80': last_char = b' ' - filtered.write(last_char) + filtered.extend(last_char) - return filtered.getvalue() + return filtered @staticmethod def filter_with_english_letters(buf): @@ -113,7 +112,7 @@ def filter_with_english_letters(buf): characters and extended ASCII characters, but is currently only used by ``Latin1Prober``. """ - filtered = BytesIO() + filtered = bytearray() in_tag = False prev = 0 @@ -132,15 +131,15 @@ def filter_with_english_letters(buf): if curr > prev and not in_tag: # Keep everything after last non-extended-ASCII, # non-alphabetic character - filtered.write(buf[prev:curr]) + filtered.extend(buf[prev:curr]) # Output a space to delimit stretch we kept - filtered.write(b' ') + filtered.extend(b' ') prev = curr + 1 # If we're not in a tag... 
if not in_tag: # Keep everything after last non-extended-ASCII, non-alphabetic # character - filtered.write(buf[prev:]) + filtered.extend(buf[prev:]) - return filtered.getvalue() + return filtered diff --git a/lib/chardet/enums.py b/lib/chardet/enums.py index 49b3e67812..edae6e8967 100644 --- a/lib/chardet/enums.py +++ b/lib/chardet/enums.py @@ -1,16 +1,11 @@ """ All of the Enums that are used throughout the chardet package. -:author: Dan Blanchard (dblanchard@ets.org) +:author: Dan Blanchard (dan.blanchard@gmail.com) """ -try: - from enum import IntEnum -except ImportError: - from enum34 import IntEnum - -class InputState(IntEnum): +class InputState(object): """ This enum represents the different states a universal detector can be in. """ @@ -19,7 +14,7 @@ class InputState(IntEnum): high_byte = 2 -class LanguageFilter(IntEnum): +class LanguageFilter(object): """ This enum represents the different language filters we can apply to a ``UniversalDetector``. @@ -34,7 +29,7 @@ class LanguageFilter(IntEnum): cjk = chinese | japanese | korean -class ProbingState(IntEnum): +class ProbingState(object): """ This enum represents the different states a prober can be in. """ @@ -43,7 +38,7 @@ class ProbingState(IntEnum): not_me = 2 -class MachineState(IntEnum): +class MachineState(object): """ This enum represents the different states a state machine can be in. """ diff --git a/lib/chardet/jpcntx.py b/lib/chardet/jpcntx.py index 4c180def74..5f57dc9c21 100644 --- a/lib/chardet/jpcntx.py +++ b/lib/chardet/jpcntx.py @@ -131,7 +131,7 @@ def __init__(self): def reset(self): self._total_rel = 0 # total sequence received - # category counters, each interger counts sequence in its category + # category counters, each integer counts sequence in its category self._rel_sample = [0] * self.NUM_OF_CATEGORY # if last byte in current buffer is not the last byte of a character, # we need to know how many bytes to skip in next buffer diff --git a/lib/chardet/mbcssm.py b/lib/chardet/mbcssm.py index 9a01af7688..1538b000f5 100644 --- a/lib/chardet/mbcssm.py +++ b/lib/chardet/mbcssm.py @@ -323,7 +323,7 @@ # To be accurate, the length of class 6 can be either 2 or 4. # But it is not necessary to discriminate between the two since -# it is used for frequency analysis only, and we are validing +# it is used for frequency analysis only, and we are validating # each code range there as well. So it is safe to set it to be # 2 here. 
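Because the bundled chardet no longer imports enum/enum34, the state containers above are plain classes with integer attributes. A quick standalone check of what callers can rely on (the value is taken from the hunk above)::

    from chardet.enums import ProbingState

    # IntEnum members have become ordinary ints on a plain class.
    print(isinstance(ProbingState.not_me, int))   # True
    print(ProbingState.not_me)                    # 2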
GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) diff --git a/lib/chardet/sbcharsetprober.py b/lib/chardet/sbcharsetprober.py index 03553732ec..8d08a3a8d2 100644 --- a/lib/chardet/sbcharsetprober.py +++ b/lib/chardet/sbcharsetprober.py @@ -103,7 +103,7 @@ def feed(self, byte_str): self._state = ProbingState.found_it elif cf < self.NEGATIVE_SHORTCUT_THRESHOLD: self.logger.debug('%s confidence = %s, below negative ' - 'shortcut threshhold %s', + 'shortcut threshold %s', self._model['charset_name'], cf, self.NEGATIVE_SHORTCUT_THRESHOLD) self._state = ProbingState.not_me diff --git a/lib/chardet/sbcsgroupprober.py b/lib/chardet/sbcsgroupprober.py index 5c2bc53e0c..98e95dc1a3 100644 --- a/lib/chardet/sbcsgroupprober.py +++ b/lib/chardet/sbcsgroupprober.py @@ -33,7 +33,7 @@ Ibm866Model, Ibm855Model) from .langgreekmodel import Latin7GreekModel, Win1253GreekModel from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel -from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel +# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel from .langthaimodel import TIS620ThaiModel from .langhebrewmodel import Win1255HebrewModel from .hebrewprober import HebrewProber @@ -63,9 +63,9 @@ def __init__(self): ] hebrew_prober = HebrewProber() logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, - False, hebrew_prober) + False, hebrew_prober) visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, - hebrew_prober) + hebrew_prober) hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) self.probers.extend([hebrew_prober, logical_hebrew_prober, visual_hebrew_prober]) diff --git a/lib/chardet/universaldetector.py b/lib/chardet/universaldetector.py index 33712a73be..627200f8dd 100644 --- a/lib/chardet/universaldetector.py +++ b/lib/chardet/universaldetector.py @@ -29,7 +29,7 @@ Module containing the UniversalDetector detector class, which is the primary class a user of ``chardet`` should use. 
-:author: Mark Pilgrim (intial port to Python) +:author: Mark Pilgrim (initial port to Python) :author: Shy Shalom (original C code) :author: Dan Blanchard (major refactoring for 3.0) :author: Ian Cordasco @@ -122,12 +122,10 @@ def feed(self, byte_str): if byte_str.startswith(codecs.BOM_UTF8): # EF BB BF UTF-8 with BOM self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0} - elif byte_str.startswith(codecs.BOM_UTF32_LE): + elif byte_str.startswith(codecs.BOM_UTF32_LE) or byte_str.startswith(codecs.BOM_UTF32_BE): # FF FE 00 00 UTF-32, little-endian BOM - self.result = {'encoding': "UTF-32LE", 'confidence': 1.0} - elif byte_str.startswith(codecs.BOM_UTF32_BE): # 00 00 FE FF UTF-32, big-endian BOM - self.result = {'encoding': "UTF-32BE", 'confidence': 1.0} + self.result = {'encoding': "UTF-32", 'confidence': 1.0} elif byte_str.startswith(b'\xFE\xFF\x00\x00'): # FE FF 00 00 UCS-4, unusual octet order BOM (3412) self.result = {'encoding': "X-ISO-10646-UCS-4-3412", @@ -136,12 +134,10 @@ def feed(self, byte_str): # 00 00 FF FE UCS-4, unusual octet order BOM (2143) self.result = {'encoding': "X-ISO-10646-UCS-4-2143", 'confidence': 1.0} - elif byte_str.startswith(codecs.BOM_LE): + elif byte_str.startswith(codecs.BOM_LE) or byte_str.startswith(codecs.BOM_BE): # FF FE UTF-16, little endian BOM - self.result = {'encoding': "UTF-16LE", 'confidence': 1.0} - elif byte_str.startswith(codecs.BOM_BE): # FE FF UTF-16, big endian BOM - self.result = {'encoding': "UTF-16BE", 'confidence': 1.0} + self.result = {'encoding': "UTF-16", 'confidence': 1.0} self._got_data = True if self.result['encoding'] is not None: @@ -207,7 +203,7 @@ def close(self): return self.done = True - if self._input_state == InputState.pure_ascii: + if self._input_state in (InputState.pure_ascii, InputState.esc_ascii): self.result = {'encoding': 'ascii', 'confidence': 1.0} return self.result @@ -228,8 +224,8 @@ def close(self): return self.result if self.logger.getEffectiveLevel() == logging.DEBUG: - self.logger.debug('no probers hit minimum threshhold') - for prober in self._charset_probers[0].mProbers: + self.logger.debug('no probers hit minimum threshold') + for prober in self._charset_probers[0].probers: if not prober: continue self.logger.debug('%s confidence = %s', prober.charset_name, diff --git a/lib/concurrent/futures/__init__.py b/lib/concurrent/futures/__init__.py index fef528199e..428b14bdfe 100644 --- a/lib/concurrent/futures/__init__.py +++ b/lib/concurrent/futures/__init__.py @@ -16,8 +16,8 @@ as_completed) from concurrent.futures.thread import ThreadPoolExecutor -# Jython doesn't have multiprocessing try: from concurrent.futures.process import ProcessPoolExecutor except ImportError: + # some platforms don't have multiprocessing pass diff --git a/lib/concurrent/futures/_base.py b/lib/concurrent/futures/_base.py index 6f0c0f3b91..2936c46b16 100644 --- a/lib/concurrent/futures/_base.py +++ b/lib/concurrent/futures/_base.py @@ -1,18 +1,12 @@ # Copyright 2009 Brian Quinlan. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. 
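With the UniversalDetector change above, BOM-prefixed UTF-16 and UTF-32 input is reported under the endian-agnostic codec name instead of UTF-16LE/UTF-16BE or UTF-32LE/UTF-32BE. A small sketch of the observable effect, assuming the package's usual chardet.detect() entry point::

    import codecs
    import chardet

    data = codecs.BOM_UTF16_LE + u'hello'.encode('utf-16-le')
    result = chardet.detect(data)
    # The generic name lets a later bytes.decode('UTF-16') strip the BOM itself.
    print(result['encoding'], result['confidence'])   # UTF-16 1.0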
-from __future__ import with_statement +import collections import logging import threading +import itertools import time -from concurrent.futures._compat import reraise - -try: - from collections import namedtuple -except ImportError: - from concurrent.futures._compat import namedtuple - __author__ = 'Brian Quinlan (brian@sweetapp.com)' FIRST_COMPLETED = 'FIRST_COMPLETED' @@ -188,7 +182,8 @@ def as_completed(fs, timeout=None): Returns: An iterator that yields the given Futures as they complete (finished or - cancelled). + cancelled). If any given Futures are duplicated, they will be returned + once. Raises: TimeoutError: If the entire result iterator could not be generated @@ -197,11 +192,12 @@ def as_completed(fs, timeout=None): if timeout is not None: end_time = timeout + time.time() + fs = set(fs) with _AcquireFutures(fs): finished = set( f for f in fs if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]) - pending = set(fs) - finished + pending = fs - finished waiter = _create_and_install_waiters(fs, _AS_COMPLETED) try: @@ -231,9 +227,10 @@ def as_completed(fs, timeout=None): finally: for f in fs: - f._waiters.remove(waiter) + with f._condition: + f._waiters.remove(waiter) -DoneAndNotDoneFutures = namedtuple( +DoneAndNotDoneFutures = collections.namedtuple( 'DoneAndNotDoneFutures', 'done not_done') def wait(fs, timeout=None, return_when=ALL_COMPLETED): """Wait for the futures in the given sequence to complete. @@ -278,7 +275,8 @@ def wait(fs, timeout=None, return_when=ALL_COMPLETED): waiter.event.wait(timeout) for f in fs: - f._waiters.remove(waiter) + with f._condition: + f._waiters.remove(waiter) done.update(waiter.finished_futures) return DoneAndNotDoneFutures(done, set(fs) - done) @@ -356,7 +354,7 @@ def done(self): def __get_result(self): if self._exception: - reraise(self._exception, self._traceback) + raise type(self._exception), self._exception, self._traceback else: return self._result @@ -497,8 +495,8 @@ def set_running_or_notify_cancel(self): return True else: LOGGER.critical('Future %s in unexpected state: %s', - id(self.future), - self.future._state) + id(self), + self._state) raise RuntimeError('Future in unexpected state') def set_result(self, result): @@ -572,17 +570,21 @@ def map(self, fn, *iterables, **kwargs): if timeout is not None: end_time = timeout + time.time() - fs = [self.submit(fn, *args) for args in zip(*iterables)] + fs = [self.submit(fn, *args) for args in itertools.izip(*iterables)] - try: - for future in fs: - if timeout is None: - yield future.result() - else: - yield future.result(end_time - time.time()) - finally: - for future in fs: - future.cancel() + # Yield must be hidden in closure so that the futures are submitted + # before the first iterator value is required. + def result_iterator(): + try: + for future in fs: + if timeout is None: + yield future.result() + else: + yield future.result(end_time - time.time()) + finally: + for future in fs: + future.cancel() + return result_iterator() def shutdown(self, wait=True): """Clean-up the resources associated with the Executor. diff --git a/lib/concurrent/futures/_compat.py b/lib/concurrent/futures/_compat.py deleted file mode 100644 index e77cf0e546..0000000000 --- a/lib/concurrent/futures/_compat.py +++ /dev/null @@ -1,111 +0,0 @@ -from keyword import iskeyword as _iskeyword -from operator import itemgetter as _itemgetter -import sys as _sys - - -def namedtuple(typename, field_names): - """Returns a new subclass of tuple with named fields. 
- - >>> Point = namedtuple('Point', 'x y') - >>> Point.__doc__ # docstring for the new class - 'Point(x, y)' - >>> p = Point(11, y=22) # instantiate with positional args or keywords - >>> p[0] + p[1] # indexable like a plain tuple - 33 - >>> x, y = p # unpack like a regular tuple - >>> x, y - (11, 22) - >>> p.x + p.y # fields also accessable by name - 33 - >>> d = p._asdict() # convert to a dictionary - >>> d['x'] - 11 - >>> Point(**d) # convert from a dictionary - Point(x=11, y=22) - >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields - Point(x=100, y=22) - - """ - - # Parse and validate the field names. Validation serves two purposes, - # generating informative error messages and preventing template injection attacks. - if isinstance(field_names, basestring): - field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas - field_names = tuple(map(str, field_names)) - for name in (typename,) + field_names: - if not all(c.isalnum() or c=='_' for c in name): - raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name) - if _iskeyword(name): - raise ValueError('Type names and field names cannot be a keyword: %r' % name) - if name[0].isdigit(): - raise ValueError('Type names and field names cannot start with a number: %r' % name) - seen_names = set() - for name in field_names: - if name.startswith('_'): - raise ValueError('Field names cannot start with an underscore: %r' % name) - if name in seen_names: - raise ValueError('Encountered duplicate field name: %r' % name) - seen_names.add(name) - - # Create and fill-in the class template - numfields = len(field_names) - argtxt = repr(field_names).replace("'", "")[1:-1] # tuple repr without parens or quotes - reprtxt = ', '.join('%s=%%r' % name for name in field_names) - dicttxt = ', '.join('%r: t[%d]' % (name, pos) for pos, name in enumerate(field_names)) - template = '''class %(typename)s(tuple): - '%(typename)s(%(argtxt)s)' \n - __slots__ = () \n - _fields = %(field_names)r \n - def __new__(_cls, %(argtxt)s): - return _tuple.__new__(_cls, (%(argtxt)s)) \n - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - 'Make a new %(typename)s object from a sequence or iterable' - result = new(cls, iterable) - if len(result) != %(numfields)d: - raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result)) - return result \n - def __repr__(self): - return '%(typename)s(%(reprtxt)s)' %% self \n - def _asdict(t): - 'Return a new dict which maps field names to their values' - return {%(dicttxt)s} \n - def _replace(_self, **kwds): - 'Return a new %(typename)s object replacing specified fields with new values' - result = _self._make(map(kwds.pop, %(field_names)r, _self)) - if kwds: - raise ValueError('Got unexpected field names: %%r' %% kwds.keys()) - return result \n - def __getnewargs__(self): - return tuple(self) \n\n''' % locals() - for i, name in enumerate(field_names): - template += ' %s = _property(_itemgetter(%d))\n' % (name, i) - - # Execute the template string in a temporary namespace and - # support tracing utilities by setting a value for frame.f_globals['__name__'] - namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename, - _property=property, _tuple=tuple) - try: - exec(template, namespace) - except SyntaxError: - e = _sys.exc_info()[1] - raise SyntaxError(e.message + ':\n' + template) - result = namespace[typename] - - # For pickling to work, the __module__ variable 
needs to be set to the frame - # where the named tuple is created. Bypass this step in enviroments where - # sys._getframe is not defined (Jython for example). - if hasattr(_sys, '_getframe'): - result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') - - return result - - -if _sys.version_info[0] < 3: - def reraise(exc, traceback): - locals_ = {'exc_type': type(exc), 'exc_value': exc, 'traceback': traceback} - exec('raise exc_type, exc_value, traceback', {}, locals_) -else: - def reraise(exc, traceback): - # Tracebacks are embedded in exceptions in Python 3 - raise exc diff --git a/lib/concurrent/futures/process.py b/lib/concurrent/futures/process.py index 98684f8e8e..72528410c1 100644 --- a/lib/concurrent/futures/process.py +++ b/lib/concurrent/futures/process.py @@ -43,20 +43,14 @@ _ResultItems in "Request Q" """ -from __future__ import with_statement import atexit +from concurrent.futures import _base +import Queue as queue import multiprocessing import threading import weakref import sys -from concurrent.futures import _base - -try: - import queue -except ImportError: - import Queue as queue - __author__ = 'Brian Quinlan (brian@sweetapp.com)' # Workers are created as daemon threads and processes. This is done to allow the @@ -79,11 +73,11 @@ def _python_exit(): global _shutdown _shutdown = True - items = list(_threads_queues.items()) + items = list(_threads_queues.items()) if _threads_queues else () for t, q in items: q.put(None) for t, q in items: - t.join() + t.join(sys.maxint) # Controls how many more calls than processes will be queued in the call queue. # A smaller number will mean that processes spend more time idle waiting for @@ -220,6 +214,8 @@ def shutdown_one_process(): work_item.future.set_exception(result_item.exception) else: work_item.future.set_result(result_item.result) + # Delete references to object. See issue16284 + del work_item # Check whether we should start shutting down. executor = executor_reference() # No more work items can be added if: @@ -351,7 +347,7 @@ def shutdown(self, wait=True): # Wake up queue management thread self._result_queue.put(None) if wait: - self._queue_management_thread.join() + self._queue_management_thread.join(sys.maxint) # To reduce the risk of openning too many files, remove references to # objects that use file descriptors. self._queue_management_thread = None diff --git a/lib/concurrent/futures/thread.py b/lib/concurrent/futures/thread.py index 930d16735f..85ab4b7432 100644 --- a/lib/concurrent/futures/thread.py +++ b/lib/concurrent/futures/thread.py @@ -3,19 +3,13 @@ """Implements ThreadPoolExecutor.""" -from __future__ import with_statement import atexit +from concurrent.futures import _base +import Queue as queue import threading import weakref import sys -from concurrent.futures import _base - -try: - import queue -except ImportError: - import Queue as queue - __author__ = 'Brian Quinlan (brian@sweetapp.com)' # Workers are created as daemon threads. This is done to allow the interpreter @@ -38,11 +32,11 @@ def _python_exit(): global _shutdown _shutdown = True - items = list(_threads_queues.items()) + items = list(_threads_queues.items()) if _threads_queues else () for t, q in items: q.put(None) for t, q in items: - t.join() + t.join(sys.maxint) atexit.register(_python_exit) @@ -71,6 +65,8 @@ def _worker(executor_reference, work_queue): work_item = work_queue.get(block=True) if work_item is not None: work_item.run() + # Delete references to object. 
See issue16284 + del work_item continue executor = executor_reference() # Exit if: @@ -134,5 +130,5 @@ def shutdown(self, wait=True): self._work_queue.put(None) if wait: for t in self._threads: - t.join() + t.join(sys.maxint) shutdown.__doc__ = _base.Executor.shutdown.__doc__ diff --git a/lib/contextlib2.py b/lib/contextlib2.py index 2b80384eab..a6acf65323 100644 --- a/lib/contextlib2.py +++ b/lib/contextlib2.py @@ -179,7 +179,7 @@ def __exit__(self, *exc_info): self.thing.close() -class _RedirectStream: +class _RedirectStream(object): _stream = None @@ -219,7 +219,7 @@ class redirect_stderr(_RedirectStream): _stream = "stderr" -class suppress: +class suppress(object): """Context manager to suppress specified exceptions After the exception is suppressed, execution proceeds with the next @@ -288,6 +288,20 @@ def _reraise_with_existing_context(exc_details): exc_type, exc_value, exc_tb = exc_details exec ("raise exc_type, exc_value, exc_tb") +# Handle old-style classes if they exist +try: + from types import InstanceType +except ImportError: + # Python 3 doesn't have old-style classes + _get_type = type +else: + # Need to handle old-style context managers on Python 2 + def _get_type(obj): + obj_type = type(obj) + if obj_type is InstanceType: + return obj.__class__ # Old-style class + return obj_type # New-style class + # Inspired by discussions on http://bugs.python.org/issue13585 class ExitStack(object): """Context manager for dynamic management of a stack of exit callbacks @@ -328,7 +342,7 @@ def push(self, exit): """ # We use an unbound method rather than a bound method to follow # the standard lookup behaviour for special methods - _cb_type = type(exit) + _cb_type = _get_type(exit) try: exit_method = _cb_type.__exit__ except AttributeError: @@ -358,7 +372,7 @@ def enter_context(self, cm): returns the result of the __enter__ method. """ # We look up the special methods on the type to match the with statement - _cm_type = type(cm) + _cm_type = _get_type(cm) _exit = _cm_type.__exit__ result = _cm_type.__enter__(cm) self._push_cm_exit(cm, _exit) diff --git a/lib/dogpile/__init__.py b/lib/dogpile/__init__.py index f48ad10528..d9054605db 100644 --- a/lib/dogpile/__init__.py +++ b/lib/dogpile/__init__.py @@ -1,6 +1,4 @@ -# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages -try: - __import__('pkg_resources').declare_namespace(__name__) -except ImportError: - from pkgutil import extend_path - __path__ = extend_path(__path__, __name__) +__version__ = '0.6.1' + +from .lock import Lock # noqa +from .lock import NeedRegenerationException # noqa \ No newline at end of file diff --git a/lib/dogpile/cache/__init__.py b/lib/dogpile/cache/__init__.py index 13e6fdad13..fb57cbcc2e 100644 --- a/lib/dogpile/cache/__init__.py +++ b/lib/dogpile/cache/__init__.py @@ -1,3 +1,4 @@ -__version__ = '0.5.7' - from .region import CacheRegion, register_backend, make_region # noqa + +# backwards compat +from .. import __version__ # noqa diff --git a/lib/dogpile/cache/api.py b/lib/dogpile/cache/api.py index 85b6de181c..dbab2db384 100644 --- a/lib/dogpile/cache/api.py +++ b/lib/dogpile/cache/api.py @@ -1,5 +1,5 @@ import operator -from .compat import py3k +from ..util.compat import py3k class NoValue(object): @@ -83,6 +83,9 @@ def from_config_dict(cls, config_dict, prefix): ) ) + def has_lock_timeout(self): + return False + def get_mutex(self, key): """Return an optional mutexing object for the given key. 
@@ -152,12 +155,21 @@ def set(self, key, value): # pragma NO COVERAGE def set_multi(self, mapping): # pragma NO COVERAGE """Set multiple values in the cache. - The key will be whatever was passed + ``mapping`` is a dict in which + the key will be whatever was passed to the registry, processed by the "key mangling" function, if any. The value will always be an instance of :class:`.CachedValue`. + When implementing a new :class:`.CacheBackend` or cutomizing via + :class:`.ProxyBackend`, be aware that when this method is invoked by + :meth:`.Region.get_or_create_multi`, the ``mapping`` values are the + same ones returned to the upstream caller. If the subclass alters the + values in any way, it must not do so 'in-place' on the ``mapping`` dict + -- that will have the undesirable effect of modifying the returned + values as well. + .. versionadded:: 0.5.0 """ diff --git a/lib/dogpile/cache/backends/file.py b/lib/dogpile/cache/backends/file.py index 42d749299c..309c055a2e 100644 --- a/lib/dogpile/cache/backends/file.py +++ b/lib/dogpile/cache/backends/file.py @@ -7,10 +7,10 @@ """ from __future__ import with_statement -from dogpile.cache.api import CacheBackend, NO_VALUE +from ..api import CacheBackend, NO_VALUE from contextlib import contextmanager -from dogpile.cache import compat -from dogpile.cache import util +from ...util import compat +from ... import util import os __all__ = 'DBMBackend', 'FileLock', 'AbstractFileLock' diff --git a/lib/dogpile/cache/backends/memcached.py b/lib/dogpile/cache/backends/memcached.py index 80acc77b42..6758a99802 100644 --- a/lib/dogpile/cache/backends/memcached.py +++ b/lib/dogpile/cache/backends/memcached.py @@ -6,9 +6,9 @@ """ -from dogpile.cache.api import CacheBackend, NO_VALUE -from dogpile.cache import compat -from dogpile.cache import util +from ..api import CacheBackend, NO_VALUE +from ...util import compat +from ... import util import random import time @@ -24,15 +24,16 @@ class MemcachedLock(object): """ - def __init__(self, client_fn, key): + def __init__(self, client_fn, key, timeout=0): self.client_fn = client_fn self.key = "_lock" + key + self.timeout = timeout def acquire(self, wait=True): client = self.client_fn() i = 0 while True: - if client.add(self.key, 1): + if client.add(self.key, 1, self.timeout): return True elif not wait: return False @@ -62,6 +63,12 @@ class GenericMemcachedBackend(CacheBackend): processes will be talking to the same memcached instance. When left at False, dogpile will coordinate on a regular threading mutex. + :param lock_timeout: integer, number of seconds after acquiring a lock that + memcached should expire it. This argument is only valid when + ``distributed_lock`` is ``True``. + + .. versionadded:: 0.5.7 + :param memcached_expire_time: integer, when present will be passed as the ``time`` parameter to ``pylibmc.Client.set``. This is used to set the memcached expiry time for a value. @@ -106,9 +113,13 @@ def __init__(self, arguments): # automatically. 
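The lock_timeout argument documented above puts an expiry on the distributed dogpile lock, so a worker that dies while holding it cannot block regeneration indefinitely. A hedged configuration sketch; the backend name 'dogpile.cache.memcached', the endpoint and the timings are illustrative placeholders::

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.memcached',
        expiration_time=300,
        arguments={
            'url': '127.0.0.1:11211',
            'distributed_lock': True,
            'lock_timeout': 30,   # memcached expires the lock key after 30s
        },
    )

Once configured this way, region.get_or_create() coordinates across processes through the memcached add() call shown in MemcachedLock.acquire().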
self.url = util.to_list(arguments['url']) self.distributed_lock = arguments.get('distributed_lock', False) + self.lock_timeout = arguments.get('lock_timeout', 0) self.memcached_expire_time = arguments.get( 'memcached_expire_time', 0) + def has_lock_timeout(self): + return self.lock_timeout != 0 + def _imports(self): """client library imports go here.""" raise NotImplementedError() @@ -141,7 +152,8 @@ def client(self): def get_mutex(self, key): if self.distributed_lock: - return MemcachedLock(lambda: self.client, key) + return MemcachedLock(lambda: self.client, key, + timeout=self.lock_timeout) else: return None @@ -330,9 +342,10 @@ class RepairBMemcachedAPI(bmemcached.Client): """ - def add(self, key, value): + def add(self, key, value, timeout=0): try: - return super(RepairBMemcachedAPI, self).add(key, value) + return super(RepairBMemcachedAPI, self).add( + key, value, timeout) except ValueError: return False diff --git a/lib/dogpile/cache/backends/memory.py b/lib/dogpile/cache/backends/memory.py index 2f9bd3a4ec..e2083f7f05 100644 --- a/lib/dogpile/cache/backends/memory.py +++ b/lib/dogpile/cache/backends/memory.py @@ -10,8 +10,8 @@ """ -from dogpile.cache.api import CacheBackend, NO_VALUE -from dogpile.cache.compat import pickle +from ..api import CacheBackend, NO_VALUE +from ...util.compat import pickle class MemoryBackend(CacheBackend): diff --git a/lib/dogpile/cache/backends/null.py b/lib/dogpile/cache/backends/null.py index c1f46a9d6d..603cca3f61 100644 --- a/lib/dogpile/cache/backends/null.py +++ b/lib/dogpile/cache/backends/null.py @@ -10,15 +10,15 @@ """ -from dogpile.cache.api import CacheBackend, NO_VALUE +from ..api import CacheBackend, NO_VALUE __all__ = ['NullBackend'] class NullLock(object): - def acquire(self): - pass + def acquire(self, wait=True): + return True def release(self): pass diff --git a/lib/dogpile/cache/backends/redis.py b/lib/dogpile/cache/backends/redis.py index b4d93e8b52..d665320a76 100644 --- a/lib/dogpile/cache/backends/redis.py +++ b/lib/dogpile/cache/backends/redis.py @@ -7,8 +7,8 @@ """ from __future__ import absolute_import -from dogpile.cache.api import CacheBackend, NO_VALUE -from dogpile.cache.compat import pickle, u +from ..api import CacheBackend, NO_VALUE +from ...util.compat import pickle, u redis = None @@ -91,6 +91,7 @@ class RedisBackend(CacheBackend): """ def __init__(self, arguments): + arguments = arguments.copy() self._imports() self.url = arguments.pop('url', None) self.host = arguments.pop('host', 'localhost') diff --git a/lib/dogpile/cache/proxy.py b/lib/dogpile/cache/proxy.py index 7fe49d6e5d..15c6b5746f 100644 --- a/lib/dogpile/cache/proxy.py +++ b/lib/dogpile/cache/proxy.py @@ -85,8 +85,8 @@ def delete(self, key): def get_multi(self, keys): return self.proxied.get_multi(keys) - def set_multi(self, keys): - self.proxied.set_multi(keys) + def set_multi(self, mapping): + self.proxied.set_multi(mapping) def delete_multi(self, keys): self.proxied.delete_multi(keys) diff --git a/lib/dogpile/cache/region.py b/lib/dogpile/cache/region.py index afa2b547c1..18e626d765 100644 --- a/lib/dogpile/cache/region.py +++ b/lib/dogpile/cache/region.py @@ -1,12 +1,12 @@ from __future__ import with_statement -from dogpile.core import Lock, NeedRegenerationException -from dogpile.core.nameregistry import NameRegistry +from .. import Lock, NeedRegenerationException +from ..util import NameRegistry from . 
import exception -from .util import function_key_generator, PluginLoader, \ - memoized_property, coerce_string_conf, function_multi_key_generator +from ..util import PluginLoader, memoized_property, coerce_string_conf +from .util import function_key_generator, function_multi_key_generator from .api import NO_VALUE, CachedValue from .proxy import ProxyBackend -from . import compat +from ..util import compat import time import datetime from numbers import Number @@ -169,10 +169,7 @@ def __init__( self.name = name self.function_key_generator = function_key_generator self.function_multi_key_generator = function_multi_key_generator - if key_mangler: - self.key_mangler = key_mangler - else: - self.key_mangler = None + self.key_mangler = self._user_defined_key_mangler = key_mangler self._hard_invalidated = None self._soft_invalidated = None self.async_creation_runner = async_creation_runner @@ -183,7 +180,8 @@ def configure( arguments=None, _config_argument_dict=None, _config_prefix=None, - wrap=None + wrap=None, + replace_existing_backend=False, ): """Configure a :class:`.CacheRegion`. @@ -223,12 +221,20 @@ def configure( :ref:`changing_backend_behavior` + :param replace_existing_backend: if True, the existing cache backend + will be replaced. Without this flag, an exception is raised if + a backend is already configured. + + .. versionadded:: 0.5.7 + + """ - if "backend" in self.__dict__: + if "backend" in self.__dict__ and not replace_existing_backend: raise exception.RegionAlreadyConfigured( "This region is already " - "configured with backend: %s" + "configured with backend: %s. " + "Specify replace_existing_backend=True to replace." % self.backend) backend_cls = _backend_loader.load(backend) if _config_argument_dict: @@ -248,7 +254,7 @@ def configure( raise exception.ValidationError( 'expiration_time is not a number or timedelta.') - if self.key_mangler is None: + if not self._user_defined_key_mangler: self.key_mangler = self.backend.key_mangler self._lock_registry = NameRegistry(self._create_mutex) @@ -666,6 +672,13 @@ def get_or_create_multi( and :meth:`.Region.set_multi` to get and set values from the backend. + If you are using a :class:`.CacheBackend` or :class:`.ProxyBackend` + that modifies values, take note this function invokes + ``.set_multi()`` for newly generated values using the same values it + returns to the calling function. A correct implementation of + ``.set_multi()`` will not modify values in-place on the submitted + ``mapping`` dict. + :param keys: Sequence of keys to be retrieved. :param creator: function which accepts a sequence of keys and @@ -1061,6 +1074,7 @@ def refresh(*arg, **kw): decorate.invalidate = invalidate decorate.refresh = refresh decorate.get = get + decorate.original = fn return decorate return decorator diff --git a/lib/dogpile/cache/util.py b/lib/dogpile/cache/util.py index 51fe483fd6..aca8260d58 100644 --- a/lib/dogpile/cache/util.py +++ b/lib/dogpile/cache/util.py @@ -1,55 +1,6 @@ from hashlib import sha1 import inspect -import re -import collections -from . 
import compat - - -def coerce_string_conf(d): - result = {} - for k, v in d.items(): - if not isinstance(v, compat.string_types): - result[k] = v - continue - - v = v.strip() - if re.match(r'^[-+]?\d+$', v): - result[k] = int(v) - elif re.match(r'^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?$', v): - result[k] = float(v) - elif v.lower() in ('false', 'true'): - result[k] = v.lower() == 'true' - elif v == 'None': - result[k] = None - else: - result[k] = v - return result - - -class PluginLoader(object): - def __init__(self, group): - self.group = group - self.impls = {} - - def load(self, name): - if name in self.impls: - return self.impls[name]() - else: # pragma NO COVERAGE - import pkg_resources - for impl in pkg_resources.iter_entry_points( - self.group, name): - self.impls[name] = impl.load - return impl.load() - else: - raise Exception( - "Can't load plugin %s %s" % - (self.group, name)) - - def register(self, name, modulepath, objname): - def load(): - mod = __import__(modulepath, fromlist=[objname]) - return getattr(mod, objname) - self.impls[name] = load +from ..util import compat def function_key_generator(namespace, fn, to_str=compat.string_type): @@ -125,71 +76,3 @@ def mangle(key): return mangle -class memoized_property(object): - """A read-only @property that is only evaluated once.""" - def __init__(self, fget, doc=None): - self.fget = fget - self.__doc__ = doc or fget.__doc__ - self.__name__ = fget.__name__ - - def __get__(self, obj, cls): - if obj is None: - return self - obj.__dict__[self.__name__] = result = self.fget(obj) - return result - - -def to_list(x, default=None): - """Coerce to a list.""" - if x is None: - return default - if not isinstance(x, (list, tuple)): - return [x] - else: - return x - - -class KeyReentrantMutex(object): - - def __init__(self, key, mutex, keys): - self.key = key - self.mutex = mutex - self.keys = keys - - @classmethod - def factory(cls, mutex): - # this collection holds zero or one - # thread idents as the key; a set of - # keynames held as the value. - keystore = collections.defaultdict(set) - - def fac(key): - return KeyReentrantMutex(key, mutex, keystore) - return fac - - def acquire(self, wait=True): - current_thread = compat.threading.current_thread().ident - keys = self.keys.get(current_thread) - if keys is not None and \ - self.key not in keys: - # current lockholder, new key. add it in - keys.add(self.key) - return True - elif self.mutex.acquire(wait=wait): - # after acquire, create new set and add our key - self.keys[current_thread].add(self.key) - return True - else: - return False - - def release(self): - current_thread = compat.threading.current_thread().ident - keys = self.keys.get(current_thread) - assert keys is not None, "this thread didn't do the acquire" - assert self.key in keys, "No acquire held for key '%s'" % self.key - keys.remove(self.key) - if not keys: - # when list of keys empty, remove - # the thread ident and unlock. - del self.keys[current_thread] - self.mutex.release() diff --git a/lib/dogpile/core.py b/lib/dogpile/core.py new file mode 100644 index 0000000000..2bcfaf8136 --- /dev/null +++ b/lib/dogpile/core.py @@ -0,0 +1,17 @@ +"""Compatibility namespace for those using dogpile.core. + +As of dogpile.cache 0.6.0, dogpile.core as a separate package +is no longer used by dogpile.cache. + +Note that this namespace will not take effect if an actual +dogpile.core installation is present. 
+ +""" + +from .util import nameregistry # noqa +from .util import readwrite_lock # noqa +from .util.readwrite_lock import ReadWriteMutex # noqa +from .util.nameregistry import NameRegistry # noqa +from .lock import Lock # noqa +from .lock import NeedRegenerationException # noqa +from . import __version__ # noqa diff --git a/lib/dogpile/core/__init__.py b/lib/dogpile/core/__init__.py deleted file mode 100644 index fb9d756d67..0000000000 --- a/lib/dogpile/core/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -from .dogpile import NeedRegenerationException, Lock -from .nameregistry import NameRegistry -from .readwrite_lock import ReadWriteMutex -from .legacy import Dogpile, SyncReaderDogpile - -__all__ = [ - 'Dogpile', 'SyncReaderDogpile', 'NeedRegenerationException', - 'NameRegistry', 'ReadWriteMutex', 'Lock'] - -__version__ = '0.4.1' - diff --git a/lib/dogpile/core/legacy.py b/lib/dogpile/core/legacy.py deleted file mode 100644 index dad4e1609d..0000000000 --- a/lib/dogpile/core/legacy.py +++ /dev/null @@ -1,154 +0,0 @@ -from __future__ import with_statement - -from .util import threading -from .readwrite_lock import ReadWriteMutex -from .dogpile import Lock -import time -import contextlib - -class Dogpile(object): - """Dogpile lock class. - - .. deprecated:: 0.4.0 - The :class:`.Lock` object specifies the full - API of the :class:`.Dogpile` object in a single way, - rather than providing multiple modes of usage which - don't necessarily work in the majority of cases. - :class:`.Dogpile` is now a wrapper around the :class:`.Lock` object - which provides dogpile.core's original usage pattern. - This usage pattern began as something simple, but was - not of general use in real-world caching environments without - several extra complicating factors; the :class:`.Lock` - object presents the "real-world" API more succinctly, - and also fixes a cross-process concurrency issue. - - :param expiretime: Expiration time in seconds. Set to - ``None`` for never expires. - :param init: if True, set the 'createdtime' to the - current time. - :param lock: a mutex object that provides - ``acquire()`` and ``release()`` methods. - - """ - def __init__(self, expiretime, init=False, lock=None): - """Construct a new :class:`.Dogpile`. - - """ - if lock: - self.dogpilelock = lock - else: - self.dogpilelock = threading.Lock() - - self.expiretime = expiretime - if init: - self.createdtime = time.time() - - createdtime = -1 - """The last known 'creation time' of the value, - stored as an epoch (i.e. from ``time.time()``). - - If the value here is -1, it is assumed the value - should recreate immediately. - - """ - - def acquire(self, creator, - value_fn=None, - value_and_created_fn=None): - """Acquire the lock, returning a context manager. - - :param creator: Creation function, used if this thread - is chosen to create a new value. - - :param value_fn: Optional function that returns - the value from some datasource. Will be returned - if regeneration is not needed. - - :param value_and_created_fn: Like value_fn, but returns a tuple - of (value, createdtime). The returned createdtime - will replace the "createdtime" value on this dogpile - lock. This option removes the need for the dogpile lock - itself to remain persistent across usages; another - dogpile can come along later and pick up where the - previous one left off. 
- - """ - - if value_and_created_fn is None: - if value_fn is None: - def value_and_created_fn(): - return None, self.createdtime - else: - def value_and_created_fn(): - return value_fn(), self.createdtime - - def creator_wrapper(): - value = creator() - self.createdtime = time.time() - return value, self.createdtime - else: - def creator_wrapper(): - value = creator() - self.createdtime = time.time() - return value - - return Lock( - self.dogpilelock, - creator_wrapper, - value_and_created_fn, - self.expiretime - ) - - @property - def is_expired(self): - """Return true if the expiration time is reached, or no - value is available.""" - - return not self.has_value or \ - ( - self.expiretime is not None and - time.time() - self.createdtime > self.expiretime - ) - - @property - def has_value(self): - """Return true if the creation function has proceeded - at least once.""" - return self.createdtime > 0 - - -class SyncReaderDogpile(Dogpile): - """Provide a read-write lock function on top of the :class:`.Dogpile` - class. - - .. deprecated:: 0.4.0 - The :class:`.ReadWriteMutex` object can be used directly. - - """ - def __init__(self, *args, **kw): - super(SyncReaderDogpile, self).__init__(*args, **kw) - self.readwritelock = ReadWriteMutex() - - @contextlib.contextmanager - def acquire_write_lock(self): - """Return the "write" lock context manager. - - This will provide a section that is mutexed against - all readers/writers for the dogpile-maintained value. - - """ - - self.readwritelock.acquire_write_lock() - try: - yield - finally: - self.readwritelock.release_write_lock() - - @contextlib.contextmanager - def acquire(self, *arg, **kw): - with super(SyncReaderDogpile, self).acquire(*arg, **kw) as value: - self.readwritelock.acquire_read_lock() - try: - yield value - finally: - self.readwritelock.release_read_lock() diff --git a/lib/dogpile/core/util.py b/lib/dogpile/core/util.py deleted file mode 100644 index f53c6818c4..0000000000 --- a/lib/dogpile/core/util.py +++ /dev/null @@ -1,8 +0,0 @@ -import sys -py3k = sys.version_info >= (3, 0) - -try: - import threading -except ImportError: - import dummy_threading as threading - diff --git a/lib/dogpile/core/dogpile.py b/lib/dogpile/lock.py similarity index 92% rename from lib/dogpile/core/dogpile.py rename to lib/dogpile/lock.py index 2e3ca0e931..29f342d84f 100644 --- a/lib/dogpile/core/dogpile.py +++ b/lib/dogpile/lock.py @@ -3,6 +3,7 @@ log = logging.getLogger(__name__) + class NeedRegenerationException(Exception): """An exception that when raised in the 'with' block, forces the 'has_value' flag to False and incurs a @@ -12,6 +13,7 @@ class NeedRegenerationException(Exception): NOT_REGENERATED = object() + class Lock(object): """Dogpile lock class. @@ -21,11 +23,6 @@ class Lock(object): continue to return the previous version of that value. - .. versionadded:: 0.4.0 - The :class:`.Lock` class was added as a single-use object - representing the dogpile API without dependence on - any shared state between multiple instances. - :param mutex: A mutex object that provides ``acquire()`` and ``release()`` methods. :param creator: Callable which returns a tuple of the form @@ -52,17 +49,16 @@ class Lock(object): this to be used to defer invocation of the creator callable until some later time. - .. versionadded:: 0.4.1 added the async_creator argument. 
- """ - def __init__(self, - mutex, - creator, - value_and_created_fn, - expiretime, - async_creator=None, - ): + def __init__( + self, + mutex, + creator, + value_and_created_fn, + expiretime, + async_creator=None, + ): self.mutex = mutex self.creator = creator self.value_and_created_fn = value_and_created_fn diff --git a/lib/dogpile/util/__init__.py b/lib/dogpile/util/__init__.py new file mode 100644 index 0000000000..7a1c0dcdb1 --- /dev/null +++ b/lib/dogpile/util/__init__.py @@ -0,0 +1,4 @@ +from .nameregistry import NameRegistry # noqa +from .readwrite_lock import ReadWriteMutex # noqa +from .langhelpers import PluginLoader, memoized_property, \ + coerce_string_conf, to_list, KeyReentrantMutex # noqa \ No newline at end of file diff --git a/lib/dogpile/cache/compat.py b/lib/dogpile/util/compat.py similarity index 100% rename from lib/dogpile/cache/compat.py rename to lib/dogpile/util/compat.py diff --git a/lib/dogpile/util/langhelpers.py b/lib/dogpile/util/langhelpers.py new file mode 100644 index 0000000000..2d34a2cc77 --- /dev/null +++ b/lib/dogpile/util/langhelpers.py @@ -0,0 +1,120 @@ +import re +import collections +from . import compat + + +def coerce_string_conf(d): + result = {} + for k, v in d.items(): + if not isinstance(v, compat.string_types): + result[k] = v + continue + + v = v.strip() + if re.match(r'^[-+]?\d+$', v): + result[k] = int(v) + elif re.match(r'^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?$', v): + result[k] = float(v) + elif v.lower() in ('false', 'true'): + result[k] = v.lower() == 'true' + elif v == 'None': + result[k] = None + else: + result[k] = v + return result + + +class PluginLoader(object): + def __init__(self, group): + self.group = group + self.impls = {} + + def load(self, name): + if name in self.impls: + return self.impls[name]() + else: # pragma NO COVERAGE + import pkg_resources + for impl in pkg_resources.iter_entry_points( + self.group, name): + self.impls[name] = impl.load + return impl.load() + else: + raise Exception( + "Can't load plugin %s %s" % + (self.group, name)) + + def register(self, name, modulepath, objname): + def load(): + mod = __import__(modulepath, fromlist=[objname]) + return getattr(mod, objname) + self.impls[name] = load + + +class memoized_property(object): + """A read-only @property that is only evaluated once.""" + def __init__(self, fget, doc=None): + self.fget = fget + self.__doc__ = doc or fget.__doc__ + self.__name__ = fget.__name__ + + def __get__(self, obj, cls): + if obj is None: + return self + obj.__dict__[self.__name__] = result = self.fget(obj) + return result + + +def to_list(x, default=None): + """Coerce to a list.""" + if x is None: + return default + if not isinstance(x, (list, tuple)): + return [x] + else: + return x + + +class KeyReentrantMutex(object): + + def __init__(self, key, mutex, keys): + self.key = key + self.mutex = mutex + self.keys = keys + + @classmethod + def factory(cls, mutex): + # this collection holds zero or one + # thread idents as the key; a set of + # keynames held as the value. + keystore = collections.defaultdict(set) + + def fac(key): + return KeyReentrantMutex(key, mutex, keystore) + return fac + + def acquire(self, wait=True): + current_thread = compat.threading.current_thread().ident + keys = self.keys.get(current_thread) + if keys is not None and \ + self.key not in keys: + # current lockholder, new key. 
add it in + keys.add(self.key) + return True + elif self.mutex.acquire(wait=wait): + # after acquire, create new set and add our key + self.keys[current_thread].add(self.key) + return True + else: + return False + + def release(self): + current_thread = compat.threading.current_thread().ident + keys = self.keys.get(current_thread) + assert keys is not None, "this thread didn't do the acquire" + assert self.key in keys, "No acquire held for key '%s'" % self.key + keys.remove(self.key) + if not keys: + # when list of keys empty, remove + # the thread ident and unlock. + del self.keys[current_thread] + self.mutex.release() diff --git a/lib/dogpile/core/nameregistry.py b/lib/dogpile/util/nameregistry.py similarity index 98% rename from lib/dogpile/core/nameregistry.py rename to lib/dogpile/util/nameregistry.py index a73f450c71..a5102b2387 100644 --- a/lib/dogpile/core/nameregistry.py +++ b/lib/dogpile/util/nameregistry.py @@ -1,6 +1,7 @@ -from .util import threading +from .compat import threading import weakref + class NameRegistry(object): """Generates and return an object, keeping it as a singleton for a certain identifier for as long as its diff --git a/lib/dogpile/core/readwrite_lock.py b/lib/dogpile/util/readwrite_lock.py similarity index 95% rename from lib/dogpile/core/readwrite_lock.py rename to lib/dogpile/util/readwrite_lock.py index 1ea25e47ab..2196ed7d70 100644 --- a/lib/dogpile/core/readwrite_lock.py +++ b/lib/dogpile/util/readwrite_lock.py @@ -1,22 +1,24 @@ -from .util import threading +from .compat import threading import logging log = logging.getLogger(__name__) + class LockError(Exception): pass + class ReadWriteMutex(object): """A mutex which allows multiple readers, single writer. - + :class:`.ReadWriteMutex` uses a Python ``threading.Condition`` to provide this functionality across threads within a process. - + The Beaker package also contained a file-lock based version of this concept, so that readers/writers could be synchronized - across processes with a common filesystem. A future Dogpile + across processes with a common filesystem. A future Dogpile release may include this additional class at some point. - + """ def __init__(self): @@ -48,7 +50,7 @@ def acquire_read_lock(self, wait = True): finally: self.condition.release() - if not wait: + if not wait: return True def release_read_lock(self): @@ -57,7 +59,7 @@ def release_read_lock(self): try: self.async -= 1 - # check if we are the last asynchronous reader thread + # check if we are the last asynchronous reader thread # out the door. if self.async == 0: # yes. 
so if a sync operation is waiting, notifyAll to wake @@ -88,7 +90,7 @@ def acquire_write_lock(self, wait = True): if self.current_sync_operation is not None: return False - # establish ourselves as the current sync + # establish ourselves as the current sync # this indicates to other read/write operations # that they should wait until this is None again self.current_sync_operation = threading.currentThread() @@ -106,7 +108,7 @@ def acquire_write_lock(self, wait = True): finally: self.condition.release() - if not wait: + if not wait: return True def release_write_lock(self): @@ -117,7 +119,7 @@ def release_write_lock(self): raise LockError("Synchronizer error - current thread doesn't " "have the write lock") - # reset the current sync operation so + # reset the current sync operation so # another can get it self.current_sync_operation = None diff --git a/lib/enum34/LICENSE b/lib/enum/LICENSE similarity index 100% rename from lib/enum34/LICENSE rename to lib/enum/LICENSE diff --git a/lib/enum34/README b/lib/enum/README similarity index 81% rename from lib/enum34/README rename to lib/enum/README index 511af98413..aa2333d8df 100644 --- a/lib/enum34/README +++ b/lib/enum/README @@ -1,2 +1,3 @@ enum34 is the new Python stdlib enum module available in Python 3.4 backported for previous versions of Python from 2.4 to 3.3. +tested on 2.6, 2.7, and 3.3+ diff --git a/lib/enum34/__init__.py b/lib/enum/__init__.py similarity index 89% rename from lib/enum34/__init__.py rename to lib/enum/__init__.py index 6a327a8a8f..d6ffb3a40f 100644 --- a/lib/enum34/__init__.py +++ b/lib/enum/__init__.py @@ -4,7 +4,7 @@ __all__ = ['Enum', 'IntEnum', 'unique'] -version = 1, 0, 4 +version = 1, 1, 6 pyver = float('%s.%s' % _sys.version_info[:2]) @@ -77,7 +77,7 @@ def _is_dunder(name): def _is_sunder(name): """Returns True if a _sunder_ name, False otherwise.""" - return (name[0] == name[-1] == '_' and + return (name[0] == name[-1] == '_' and name[1:2] != '_' and name[-2:-1] != '_' and len(name) > 2) @@ -118,10 +118,13 @@ def __setitem__(self, key, value): leftover from 2.x """ - if pyver >= 3.0 and key == '__order__': - return + if pyver >= 3.0 and key in ('_order_', '__order__'): + return + elif key == '__order__': + key = '_order_' if _is_sunder(key): - raise ValueError('_names_ are reserved for future Enum use') + if key != '_order_': + raise ValueError('_names_ are reserved for future Enum use') elif _is_dunder(key): pass elif key in self._member_names: @@ -168,21 +171,21 @@ def __new__(metacls, cls, bases, classdict): del classdict[name] # py2 support for definition order - __order__ = classdict.get('__order__') - if __order__ is None: + _order_ = classdict.get('_order_') + if _order_ is None: if pyver < 3.0: try: - __order__ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])] + _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])] except TypeError: - __order__ = [name for name in sorted(members.keys())] + _order_ = [name for name in sorted(members.keys())] else: - __order__ = classdict._member_names + _order_ = classdict._member_names else: - del classdict['__order__'] + del classdict['_order_'] if pyver < 3.0: - __order__ = __order__.replace(',', ' ').split() - aliases = [name for name in members if name not in __order__] - __order__ += aliases + _order_ = _order_.replace(',', ' ').split() + aliases = [name for name in members if name not in _order_] + _order_ += aliases # check for illegal enum names (any others?) 
invalid_names = set(members) & set(['mro']) @@ -190,6 +193,9 @@ def __new__(metacls, cls, bases, classdict): raise ValueError('Invalid enum member name(s): %s' % ( ', '.join(invalid_names), )) + # save attributes from super classes so we know if we can take + # the shortcut of storing members in the class dict + base_attributes = set([a for b in bases for a in b.__dict__]) # create our new Enum type enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict) enum_class._member_names_ = [] # names in random order @@ -208,7 +214,7 @@ def __new__(metacls, cls, bases, classdict): # auto-numbering ;) if __new__ is None: __new__ = enum_class.__new__ - for member_name in __order__: + for member_name in _order_: value = members[member_name] if not isinstance(value, tuple): args = (value, ) @@ -237,6 +243,11 @@ def __new__(metacls, cls, bases, classdict): else: # Aliases don't appear in member names (only in __members__). enum_class._member_names_.append(member_name) + # performance boost for any member that would not shadow + # a DynamicClassAttribute (aka _RouteClassAttributeToGetattr) + if member_name not in base_attributes: + setattr(enum_class, member_name, enum_member) + # now add to _member_map_ enum_class._member_map_[member_name] = enum_member try: # This may fail if value is not hashable. We can't add the value @@ -310,7 +321,13 @@ def __new__(metacls, cls, bases, classdict): setattr(enum_class, '__new__', Enum.__dict__['__new__']) return enum_class - def __call__(cls, value, names=None, module=None, type=None): + def __bool__(cls): + """ + classes/types should always be True. + """ + return True + + def __call__(cls, value, names=None, module=None, type=None, start=1): """Either returns an existing member, or creates a new enum class. This method is used both when an enum class is given a value to match @@ -329,7 +346,7 @@ def __call__(cls, value, names=None, module=None, type=None): if names is None: # simple value lookup return cls.__new__(cls, value) # otherwise, functional API: we're creating a new Enum type - return cls._create_(value, names, module=module, type=type) + return cls._create_(value, names, module=module, type=type, start=start) def __contains__(cls, member): return isinstance(member, cls) and member.name in cls._member_map_ @@ -384,6 +401,8 @@ def __reversed__(cls): def __len__(cls): return len(cls._member_names_) + __nonzero__ = __bool__ + def __repr__(cls): return "" % cls.__name__ @@ -400,7 +419,7 @@ def __setattr__(cls, name, value): raise AttributeError('Cannot reassign members.') super(EnumMeta, cls).__setattr__(name, value) - def _create_(cls, class_name, names=None, module=None, type=None): + def _create_(cls, class_name, names=None, module=None, type=None, start=1): """Convenience method to create a new Enum class. `names` can be: @@ -425,25 +444,26 @@ def _create_(cls, class_name, names=None, module=None, type=None): else: bases = (type, cls) classdict = metacls.__prepare__(class_name, bases) - __order__ = [] + _order_ = [] # special processing needed for names? if isinstance(names, basestring): names = names.replace(',', ' ').split() if isinstance(names, (tuple, list)) and isinstance(names[0], basestring): - names = [(e, i+1) for (i, e) in enumerate(names)] + names = [(e, i+start) for (i, e) in enumerate(names)] # Here, names is either an iterable of (name, value) or a mapping. 
+ item = None # in case names is empty for item in names: if isinstance(item, basestring): member_name, member_value = item, names[item] else: member_name, member_value = item classdict[member_name] = member_value - __order__.append(member_name) - # only set __order__ in classdict if name/value was not from a mapping + _order_.append(member_name) + # only set _order_ in classdict if name/value was not from a mapping if not isinstance(item, basestring): - classdict['__order__'] = ' '.join(__order__) + classdict['_order_'] = ' '.join(_order_) enum_class = metacls.__new__(metacls, class_name, bases, classdict) # TODO: replace the frame hack if a blessed way to know the calling @@ -470,7 +490,7 @@ def _get_mixins_(bases): """ if not bases or Enum is None: return object, Enum - + # double check that we are not subclassing a class with existing # enumeration members; while we're at it, see if any other data @@ -654,16 +674,17 @@ def __str__(self): temp_enum_dict['__str__'] = __str__ del __str__ -def __dir__(self): - added_behavior = [ - m - for cls in self.__class__.mro() - for m in cls.__dict__ - if m[0] != '_' - ] - return (['__class__', '__doc__', '__module__', ] + added_behavior) -temp_enum_dict['__dir__'] = __dir__ -del __dir__ +if pyver >= 3.0: + def __dir__(self): + added_behavior = [ + m + for cls in self.__class__.mro() + for m in cls.__dict__ + if m[0] != '_' and m not in self._member_map_ + ] + return (['__class__', '__doc__', '__module__', ] + added_behavior) + temp_enum_dict['__dir__'] = __dir__ + del __dir__ def __format__(self, format_spec): # mixed-in Enums should use the mixed-in type's __format__, otherwise @@ -719,7 +740,7 @@ def __gt__(self, other): raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__)) temp_enum_dict['__gt__'] = __gt__ del __gt__ - + def __eq__(self, other): if type(other) is self.__class__: @@ -764,6 +785,30 @@ def value(self): temp_enum_dict['value'] = value del value +@classmethod +def _convert(cls, name, module, filter, source=None): + """ + Create a new Enum subclass that replaces a collection of global constants + """ + # convert all constants from source (or module) that pass filter() to + # a new Enum called name, and export the enum and its members back to + # module; + # also, replace the __reduce_ex__ method so unpickling works in + # previous Python versions + module_globals = vars(_sys.modules[module]) + if source: + source = vars(source) + else: + source = module_globals + members = dict((name, value) for name, value in source.items() if filter(name)) + cls = cls(name, members, module=module) + cls.__reduce_ex__ = _reduce_ex_by_name + module_globals.update(cls.__members__) + module_globals[name] = cls + return cls +temp_enum_dict['_convert'] = _convert +del _convert + Enum = EnumMeta('Enum', (object, ), temp_enum_dict) del temp_enum_dict @@ -773,6 +818,8 @@ def value(self): class IntEnum(int, Enum): """Enum where members are also (and must be) ints""" +def _reduce_ex_by_name(self, proto): + return self.name def unique(enumeration): """Class decorator that ensures only unique members exist in an enumeration.""" diff --git a/lib/enum34/doc/enum.rst b/lib/enum34/doc/enum.rst deleted file mode 100644 index 0d429bfc4c..0000000000 --- a/lib/enum34/doc/enum.rst +++ /dev/null @@ -1,725 +0,0 @@ -``enum`` --- support for enumerations -======================================== - -.. :synopsis: enumerations are sets of symbolic names bound to unique, constant - values. -.. :moduleauthor:: Ethan Furman -.. 
:sectionauthor:: Barry Warsaw , -.. :sectionauthor:: Eli Bendersky , -.. :sectionauthor:: Ethan Furman - ----------------- - -An enumeration is a set of symbolic names (members) bound to unique, constant -values. Within an enumeration, the members can be compared by identity, and -the enumeration itself can be iterated over. - - -Module Contents ---------------- - -This module defines two enumeration classes that can be used to define unique -sets of names and values: ``Enum`` and ``IntEnum``. It also defines -one decorator, ``unique``. - -``Enum`` - -Base class for creating enumerated constants. See section `Functional API`_ -for an alternate construction syntax. - -``IntEnum`` - -Base class for creating enumerated constants that are also subclasses of ``int``. - -``unique`` - -Enum class decorator that ensures only one name is bound to any one value. - - -Creating an Enum ----------------- - -Enumerations are created using the ``class`` syntax, which makes them -easy to read and write. An alternative creation method is described in -`Functional API`_. To define an enumeration, subclass ``Enum`` as -follows:: - - >>> from enum import Enum - >>> class Color(Enum): - ... red = 1 - ... green = 2 - ... blue = 3 - -Note: Nomenclature - - - The class ``Color`` is an *enumeration* (or *enum*) - - The attributes ``Color.red``, ``Color.green``, etc., are - *enumeration members* (or *enum members*). - - The enum members have *names* and *values* (the name of - ``Color.red`` is ``red``, the value of ``Color.blue`` is - ``3``, etc.) - -Note: - - Even though we use the ``class`` syntax to create Enums, Enums - are not normal Python classes. See `How are Enums different?`_ for - more details. - -Enumeration members have human readable string representations:: - - >>> print(Color.red) - Color.red - -...while their ``repr`` has more information:: - - >>> print(repr(Color.red)) - - -The *type* of an enumeration member is the enumeration it belongs to:: - - >>> type(Color.red) - - >>> isinstance(Color.green, Color) - True - >>> - -Enum members also have a property that contains just their item name:: - - >>> print(Color.red.name) - red - -Enumerations support iteration. In Python 3.x definition order is used; in -Python 2.x the definition order is not available, but class attribute -``__order__`` is supported; otherwise, value order is used:: - - >>> class Shake(Enum): - ... __order__ = 'vanilla chocolate cookies mint' # only needed in 2.x - ... vanilla = 7 - ... chocolate = 4 - ... cookies = 9 - ... mint = 3 - ... - >>> for shake in Shake: - ... print(shake) - ... - Shake.vanilla - Shake.chocolate - Shake.cookies - Shake.mint - -The ``__order__`` attribute is always removed, and in 3.x it is also ignored -(order is definition order); however, in the stdlib version it will be ignored -but not removed. - -Enumeration members are hashable, so they can be used in dictionaries and sets:: - - >>> apples = {} - >>> apples[Color.red] = 'red delicious' - >>> apples[Color.green] = 'granny smith' - >>> apples == {Color.red: 'red delicious', Color.green: 'granny smith'} - True - - -Programmatic access to enumeration members and their attributes ---------------------------------------------------------------- - -Sometimes it's useful to access members in enumerations programmatically (i.e. -situations where ``Color.red`` won't do because the exact color is not known -at program-writing time). 
``Enum`` allows such access:: - - >>> Color(1) - - >>> Color(3) - - -If you want to access enum members by *name*, use item access:: - - >>> Color['red'] - - >>> Color['green'] - - -If have an enum member and need its ``name`` or ``value``:: - - >>> member = Color.red - >>> member.name - 'red' - >>> member.value - 1 - - -Duplicating enum members and values ------------------------------------ - -Having two enum members (or any other attribute) with the same name is invalid; -in Python 3.x this would raise an error, but in Python 2.x the second member -simply overwrites the first:: - - >>> # python 2.x - >>> class Shape(Enum): - ... square = 2 - ... square = 3 - ... - >>> Shape.square - - - >>> # python 3.x - >>> class Shape(Enum): - ... square = 2 - ... square = 3 - Traceback (most recent call last): - ... - TypeError: Attempted to reuse key: 'square' - -However, two enum members are allowed to have the same value. Given two members -A and B with the same value (and A defined first), B is an alias to A. By-value -lookup of the value of A and B will return A. By-name lookup of B will also -return A:: - - >>> class Shape(Enum): - ... __order__ = 'square diamond circle alias_for_square' # only needed in 2.x - ... square = 2 - ... diamond = 1 - ... circle = 3 - ... alias_for_square = 2 - ... - >>> Shape.square - - >>> Shape.alias_for_square - - >>> Shape(2) - - - -Allowing aliases is not always desirable. ``unique`` can be used to ensure -that none exist in a particular enumeration:: - - >>> from enum import unique - >>> @unique - ... class Mistake(Enum): - ... __order__ = 'one two three four' # only needed in 2.x - ... one = 1 - ... two = 2 - ... three = 3 - ... four = 3 - Traceback (most recent call last): - ... - ValueError: duplicate names found in : four -> three - -Iterating over the members of an enum does not provide the aliases:: - - >>> list(Shape) - [, , ] - -The special attribute ``__members__`` is a dictionary mapping names to members. -It includes all names defined in the enumeration, including the aliases:: - - >>> for name, member in sorted(Shape.__members__.items()): - ... name, member - ... - ('alias_for_square', ) - ('circle', ) - ('diamond', ) - ('square', ) - -The ``__members__`` attribute can be used for detailed programmatic access to -the enumeration members. For example, finding all the aliases:: - - >>> [name for name, member in Shape.__members__.items() if member.name != name] - ['alias_for_square'] - -Comparisons ------------ - -Enumeration members are compared by identity:: - - >>> Color.red is Color.red - True - >>> Color.red is Color.blue - False - >>> Color.red is not Color.blue - True - -Ordered comparisons between enumeration values are *not* supported. Enum -members are not integers (but see `IntEnum`_ below):: - - >>> Color.red < Color.blue - Traceback (most recent call last): - File "", line 1, in - TypeError: unorderable types: Color() < Color() - -.. warning:: - - In Python 2 *everything* is ordered, even though the ordering may not - make sense. If you want your enumerations to have a sensible ordering - check out the `OrderedEnum`_ recipe below. 
- - -Equality comparisons are defined though:: - - >>> Color.blue == Color.red - False - >>> Color.blue != Color.red - True - >>> Color.blue == Color.blue - True - -Comparisons against non-enumeration values will always compare not equal -(again, ``IntEnum`` was explicitly designed to behave differently, see -below):: - - >>> Color.blue == 2 - False - - -Allowed members and attributes of enumerations ----------------------------------------------- - -The examples above use integers for enumeration values. Using integers is -short and handy (and provided by default by the `Functional API`_), but not -strictly enforced. In the vast majority of use-cases, one doesn't care what -the actual value of an enumeration is. But if the value *is* important, -enumerations can have arbitrary values. - -Enumerations are Python classes, and can have methods and special methods as -usual. If we have this enumeration:: - - >>> class Mood(Enum): - ... funky = 1 - ... happy = 3 - ... - ... def describe(self): - ... # self is the member here - ... return self.name, self.value - ... - ... def __str__(self): - ... return 'my custom str! {0}'.format(self.value) - ... - ... @classmethod - ... def favorite_mood(cls): - ... # cls here is the enumeration - ... return cls.happy - -Then:: - - >>> Mood.favorite_mood() - - >>> Mood.happy.describe() - ('happy', 3) - >>> str(Mood.funky) - 'my custom str! 1' - -The rules for what is allowed are as follows: _sunder_ names (starting and -ending with a single underscore) are reserved by enum and cannot be used; -all other attributes defined within an enumeration will become members of this -enumeration, with the exception of *__dunder__* names and descriptors (methods -are also descriptors). - -Note: - - If your enumeration defines ``__new__`` and/or ``__init__`` then - whatever value(s) were given to the enum member will be passed into - those methods. See `Planet`_ for an example. - - -Restricted subclassing of enumerations --------------------------------------- - -Subclassing an enumeration is allowed only if the enumeration does not define -any members. So this is forbidden:: - - >>> class MoreColor(Color): - ... pink = 17 - Traceback (most recent call last): - ... - TypeError: Cannot extend enumerations - -But this is allowed:: - - >>> class Foo(Enum): - ... def some_behavior(self): - ... pass - ... - >>> class Bar(Foo): - ... happy = 1 - ... sad = 2 - ... - -Allowing subclassing of enums that define members would lead to a violation of -some important invariants of types and instances. On the other hand, it makes -sense to allow sharing some common behavior between a group of enumerations. -(See `OrderedEnum`_ for an example.) - - -Pickling --------- - -Enumerations can be pickled and unpickled:: - - >>> from enum.test_enum import Fruit - >>> from pickle import dumps, loads - >>> Fruit.tomato is loads(dumps(Fruit.tomato, 2)) - True - -The usual restrictions for pickling apply: picklable enums must be defined in -the top level of a module, since unpickling requires them to be importable -from that module. - -Note: - - With pickle protocol version 4 (introduced in Python 3.4) it is possible - to easily pickle enums nested in other classes. - - - -Functional API --------------- - -The ``Enum`` class is callable, providing the following functional API:: - - >>> Animal = Enum('Animal', 'ant bee cat dog') - >>> Animal - - >>> Animal.ant - - >>> Animal.ant.value - 1 - >>> list(Animal) - [, , , ] - -The semantics of this API resemble ``namedtuple``. 
The first argument -of the call to ``Enum`` is the name of the enumeration. - -The second argument is the *source* of enumeration member names. It can be a -whitespace-separated string of names, a sequence of names, a sequence of -2-tuples with key/value pairs, or a mapping (e.g. dictionary) of names to -values. The last two options enable assigning arbitrary values to -enumerations; the others auto-assign increasing integers starting with 1. A -new class derived from ``Enum`` is returned. In other words, the above -assignment to ``Animal`` is equivalent to:: - - >>> class Animals(Enum): - ... ant = 1 - ... bee = 2 - ... cat = 3 - ... dog = 4 - -Pickling enums created with the functional API can be tricky as frame stack -implementation details are used to try and figure out which module the -enumeration is being created in (e.g. it will fail if you use a utility -function in separate module, and also may not work on IronPython or Jython). -The solution is to specify the module name explicitly as follows:: - - >>> Animals = Enum('Animals', 'ant bee cat dog', module=__name__) - -Derived Enumerations --------------------- - -IntEnum -^^^^^^^ - -A variation of ``Enum`` is provided which is also a subclass of -``int``. Members of an ``IntEnum`` can be compared to integers; -by extension, integer enumerations of different types can also be compared -to each other:: - - >>> from enum import IntEnum - >>> class Shape(IntEnum): - ... circle = 1 - ... square = 2 - ... - >>> class Request(IntEnum): - ... post = 1 - ... get = 2 - ... - >>> Shape == 1 - False - >>> Shape.circle == 1 - True - >>> Shape.circle == Request.post - True - -However, they still can't be compared to standard ``Enum`` enumerations:: - - >>> class Shape(IntEnum): - ... circle = 1 - ... square = 2 - ... - >>> class Color(Enum): - ... red = 1 - ... green = 2 - ... - >>> Shape.circle == Color.red - False - -``IntEnum`` values behave like integers in other ways you'd expect:: - - >>> int(Shape.circle) - 1 - >>> ['a', 'b', 'c'][Shape.circle] - 'b' - >>> [i for i in range(Shape.square)] - [0, 1] - -For the vast majority of code, ``Enum`` is strongly recommended, -since ``IntEnum`` breaks some semantic promises of an enumeration (by -being comparable to integers, and thus by transitivity to other -unrelated enumerations). It should be used only in special cases where -there's no other choice; for example, when integer constants are -replaced with enumerations and backwards compatibility is required with code -that still expects integers. - - -Others -^^^^^^ - -While ``IntEnum`` is part of the ``enum`` module, it would be very -simple to implement independently:: - - class IntEnum(int, Enum): - pass - -This demonstrates how similar derived enumerations can be defined; for example -a ``StrEnum`` that mixes in ``str`` instead of ``int``. - -Some rules: - -1. When subclassing ``Enum``, mix-in types must appear before - ``Enum`` itself in the sequence of bases, as in the ``IntEnum`` - example above. -2. While ``Enum`` can have members of any type, once you mix in an - additional type, all the members must have values of that type, e.g. - ``int`` above. This restriction does not apply to mix-ins which only - add methods and don't specify another data type such as ``int`` or - ``str``. -3. When another data type is mixed in, the ``value`` attribute is *not the - same* as the enum member itself, although it is equivalant and will compare - equal. -4. 
%-style formatting: ``%s`` and ``%r`` call ``Enum``'s ``__str__`` and - ``__repr__`` respectively; other codes (such as ``%i`` or ``%h`` for - IntEnum) treat the enum member as its mixed-in type. - - Note: Prior to Python 3.4 there is a bug in ``str``'s %-formatting: ``int`` - subclasses are printed as strings and not numbers when the ``%d``, ``%i``, - or ``%u`` codes are used. -5. ``str.__format__`` (or ``format``) will use the mixed-in - type's ``__format__``. If the ``Enum``'s ``str`` or - ``repr`` is desired use the ``!s`` or ``!r`` ``str`` format codes. - - -Decorators ----------- - -unique -^^^^^^ - -A ``class`` decorator specifically for enumerations. It searches an -enumeration's ``__members__`` gathering any aliases it finds; if any are -found ``ValueError`` is raised with the details:: - - >>> @unique - ... class NoDupes(Enum): - ... first = 'one' - ... second = 'two' - ... third = 'two' - Traceback (most recent call last): - ... - ValueError: duplicate names found in : third -> second - - -Interesting examples --------------------- - -While ``Enum`` and ``IntEnum`` are expected to cover the majority of -use-cases, they cannot cover them all. Here are recipes for some different -types of enumerations that can be used directly, or as examples for creating -one's own. - - -AutoNumber -^^^^^^^^^^ - -Avoids having to specify the value for each enumeration member:: - - >>> class AutoNumber(Enum): - ... def __new__(cls): - ... value = len(cls.__members__) + 1 - ... obj = object.__new__(cls) - ... obj._value_ = value - ... return obj - ... - >>> class Color(AutoNumber): - ... __order__ = "red green blue" # only needed in 2.x - ... red = () - ... green = () - ... blue = () - ... - >>> Color.green.value == 2 - True - -Note: - - The `__new__` method, if defined, is used during creation of the Enum - members; it is then replaced by Enum's `__new__` which is used after - class creation for lookup of existing members. Due to the way Enums are - supposed to behave, there is no way to customize Enum's `__new__`. - - -UniqueEnum -^^^^^^^^^^ - -Raises an error if a duplicate member name is found instead of creating an -alias:: - - >>> class UniqueEnum(Enum): - ... def __init__(self, *args): - ... cls = self.__class__ - ... if any(self.value == e.value for e in cls): - ... a = self.name - ... e = cls(self.value).name - ... raise ValueError( - ... "aliases not allowed in UniqueEnum: %r --> %r" - ... % (a, e)) - ... - >>> class Color(UniqueEnum): - ... red = 1 - ... green = 2 - ... blue = 3 - ... grene = 2 - Traceback (most recent call last): - ... - ValueError: aliases not allowed in UniqueEnum: 'grene' --> 'green' - - -OrderedEnum -^^^^^^^^^^^ - -An ordered enumeration that is not based on ``IntEnum`` and so maintains -the normal ``Enum`` invariants (such as not being comparable to other -enumerations):: - - >>> class OrderedEnum(Enum): - ... def __ge__(self, other): - ... if self.__class__ is other.__class__: - ... return self._value_ >= other._value_ - ... return NotImplemented - ... def __gt__(self, other): - ... if self.__class__ is other.__class__: - ... return self._value_ > other._value_ - ... return NotImplemented - ... def __le__(self, other): - ... if self.__class__ is other.__class__: - ... return self._value_ <= other._value_ - ... return NotImplemented - ... def __lt__(self, other): - ... if self.__class__ is other.__class__: - ... return self._value_ < other._value_ - ... return NotImplemented - ... - >>> class Grade(OrderedEnum): - ... __ordered__ = 'A B C D F' - ... A = 5 - ... 
B = 4 - ... C = 3 - ... D = 2 - ... F = 1 - ... - >>> Grade.C < Grade.A - True - - -Planet -^^^^^^ - -If ``__new__`` or ``__init__`` is defined the value of the enum member -will be passed to those methods:: - - >>> class Planet(Enum): - ... MERCURY = (3.303e+23, 2.4397e6) - ... VENUS = (4.869e+24, 6.0518e6) - ... EARTH = (5.976e+24, 6.37814e6) - ... MARS = (6.421e+23, 3.3972e6) - ... JUPITER = (1.9e+27, 7.1492e7) - ... SATURN = (5.688e+26, 6.0268e7) - ... URANUS = (8.686e+25, 2.5559e7) - ... NEPTUNE = (1.024e+26, 2.4746e7) - ... def __init__(self, mass, radius): - ... self.mass = mass # in kilograms - ... self.radius = radius # in meters - ... @property - ... def surface_gravity(self): - ... # universal gravitational constant (m3 kg-1 s-2) - ... G = 6.67300E-11 - ... return G * self.mass / (self.radius * self.radius) - ... - >>> Planet.EARTH.value - (5.976e+24, 6378140.0) - >>> Planet.EARTH.surface_gravity - 9.802652743337129 - - -How are Enums different? ------------------------- - -Enums have a custom metaclass that affects many aspects of both derived Enum -classes and their instances (members). - - -Enum Classes -^^^^^^^^^^^^ - -The ``EnumMeta`` metaclass is responsible for providing the -``__contains__``, ``__dir__``, ``__iter__`` and other methods that -allow one to do things with an ``Enum`` class that fail on a typical -class, such as ``list(Color)`` or ``some_var in Color``. ``EnumMeta`` is -responsible for ensuring that various other methods on the final ``Enum`` -class are correct (such as ``__new__``, ``__getnewargs__``, -``__str__`` and ``__repr__``) - - -Enum Members (aka instances) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The most interesting thing about Enum members is that they are singletons. -``EnumMeta`` creates them all while it is creating the ``Enum`` -class itself, and then puts a custom ``__new__`` in place to ensure -that no new ones are ever instantiated by returning only the existing -member instances. - - -Finer Points -^^^^^^^^^^^^ - -Enum members are instances of an Enum class, and even though they are -accessible as ``EnumClass.member``, they are not accessible directly from -the member:: - - >>> Color.red - - >>> Color.red.blue - Traceback (most recent call last): - ... - AttributeError: 'Color' object has no attribute 'blue' - -Likewise, ``__members__`` is only available on the class. - -In Python 3.x ``__members__`` is always an ``OrderedDict``, with the order being -the definition order. In Python 2.7 ``__members__`` is an ``OrderedDict`` if -``__order__`` was specified, and a plain ``dict`` otherwise. In all other Python -2.x versions ``__members__`` is a plain ``dict`` even if ``__order__`` was specified -as the ``OrderedDict`` type didn't exist yet. - -If you give your ``Enum`` subclass extra methods, like the `Planet`_ -class above, those methods will show up in a `dir` of the member, -but not of the class:: - - >>> dir(Planet) - ['EARTH', 'JUPITER', 'MARS', 'MERCURY', 'NEPTUNE', 'SATURN', 'URANUS', - 'VENUS', '__class__', '__doc__', '__members__', '__module__'] - >>> dir(Planet.EARTH) - ['__class__', '__doc__', '__module__', 'name', 'surface_gravity', 'value'] - -A ``__new__`` method will only be used for the creation of the -``Enum`` members -- after that it is replaced. This means if you wish to -change how ``Enum`` members are looked up you either have to write a -helper function or a ``classmethod``. 
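Two user-visible behaviours from the enum34 1.0.4 -> 1.1.6 upgrade above are worth calling out: the functional API now honours a start value for auto-numbering, and _order_ is the supported spelling for pinning definition order on Python 2 (the old __order__ spelling is still accepted and rewritten to _order_). A small sketch, assuming the vendored lib/ directory is on sys.path so that 'enum' resolves to this backport:

    from enum import Enum

    # Functional API: auto-numbering starts at start= (default is 1).
    Animal = Enum('Animal', 'ant bee cat', start=0)
    print(Animal.ant.value)   # 0
    print(Animal.cat.value)   # 2

    # Class API: _order_ pins iteration order on Python 2; the backport ignores
    # it on Python 3, where definition order is used anyway.
    class Shake(Enum):
        _order_ = 'vanilla chocolate mint'
        vanilla = 7
        chocolate = 4
        mint = 3

    print([member.name for member in Shake])  # ['vanilla', 'chocolate', 'mint']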
diff --git a/lib/enum34/enum.py b/lib/enum34/enum.py deleted file mode 100644 index 6a327a8a8f..0000000000 --- a/lib/enum34/enum.py +++ /dev/null @@ -1,790 +0,0 @@ -"""Python Enumerations""" - -import sys as _sys - -__all__ = ['Enum', 'IntEnum', 'unique'] - -version = 1, 0, 4 - -pyver = float('%s.%s' % _sys.version_info[:2]) - -try: - any -except NameError: - def any(iterable): - for element in iterable: - if element: - return True - return False - -try: - from collections import OrderedDict -except ImportError: - OrderedDict = None - -try: - basestring -except NameError: - # In Python 2 basestring is the ancestor of both str and unicode - # in Python 3 it's just str, but was missing in 3.1 - basestring = str - -try: - unicode -except NameError: - # In Python 3 unicode no longer exists (it's just str) - unicode = str - -class _RouteClassAttributeToGetattr(object): - """Route attribute access on a class to __getattr__. - - This is a descriptor, used to define attributes that act differently when - accessed through an instance and through a class. Instance access remains - normal, but access to an attribute through a class will be routed to the - class's __getattr__ method; this is done by raising AttributeError. - - """ - def __init__(self, fget=None): - self.fget = fget - - def __get__(self, instance, ownerclass=None): - if instance is None: - raise AttributeError() - return self.fget(instance) - - def __set__(self, instance, value): - raise AttributeError("can't set attribute") - - def __delete__(self, instance): - raise AttributeError("can't delete attribute") - - -def _is_descriptor(obj): - """Returns True if obj is a descriptor, False otherwise.""" - return ( - hasattr(obj, '__get__') or - hasattr(obj, '__set__') or - hasattr(obj, '__delete__')) - - -def _is_dunder(name): - """Returns True if a __dunder__ name, False otherwise.""" - return (name[:2] == name[-2:] == '__' and - name[2:3] != '_' and - name[-3:-2] != '_' and - len(name) > 4) - - -def _is_sunder(name): - """Returns True if a _sunder_ name, False otherwise.""" - return (name[0] == name[-1] == '_' and - name[1:2] != '_' and - name[-2:-1] != '_' and - len(name) > 2) - - -def _make_class_unpicklable(cls): - """Make the given class un-picklable.""" - def _break_on_call_reduce(self, protocol=None): - raise TypeError('%r cannot be pickled' % self) - cls.__reduce_ex__ = _break_on_call_reduce - cls.__module__ = '' - - -class _EnumDict(dict): - """Track enum member order and ensure member names are not reused. - - EnumMeta will use the names found in self._member_names as the - enumeration member names. - - """ - def __init__(self): - super(_EnumDict, self).__init__() - self._member_names = [] - - def __setitem__(self, key, value): - """Changes anything not dundered or not a descriptor. - - If a descriptor is added with the same name as an enum member, the name - is removed from _member_names (this may leave a hole in the numerical - sequence of values). - - If an enum member name is used twice, an error is raised; duplicate - values are not checked for. - - Single underscore (sunder) names are reserved. - - Note: in 3.x __order__ is simply discarded as a not necessary piece - leftover from 2.x - - """ - if pyver >= 3.0 and key == '__order__': - return - if _is_sunder(key): - raise ValueError('_names_ are reserved for future Enum use') - elif _is_dunder(key): - pass - elif key in self._member_names: - # descriptor overwriting an enum? 
- raise TypeError('Attempted to reuse key: %r' % key) - elif not _is_descriptor(value): - if key in self: - # enum overwriting a descriptor? - raise TypeError('Key already defined as: %r' % self[key]) - self._member_names.append(key) - super(_EnumDict, self).__setitem__(key, value) - - -# Dummy value for Enum as EnumMeta explicity checks for it, but of course until -# EnumMeta finishes running the first time the Enum class doesn't exist. This -# is also why there are checks in EnumMeta like `if Enum is not None` -Enum = None - - -class EnumMeta(type): - """Metaclass for Enum""" - @classmethod - def __prepare__(metacls, cls, bases): - return _EnumDict() - - def __new__(metacls, cls, bases, classdict): - # an Enum class is final once enumeration items have been defined; it - # cannot be mixed with other types (int, float, etc.) if it has an - # inherited __new__ unless a new __new__ is defined (or the resulting - # class will fail). - if type(classdict) is dict: - original_dict = classdict - classdict = _EnumDict() - for k, v in original_dict.items(): - classdict[k] = v - - member_type, first_enum = metacls._get_mixins_(bases) - __new__, save_new, use_args = metacls._find_new_(classdict, member_type, - first_enum) - # save enum items into separate mapping so they don't get baked into - # the new class - members = dict((k, classdict[k]) for k in classdict._member_names) - for name in classdict._member_names: - del classdict[name] - - # py2 support for definition order - __order__ = classdict.get('__order__') - if __order__ is None: - if pyver < 3.0: - try: - __order__ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])] - except TypeError: - __order__ = [name for name in sorted(members.keys())] - else: - __order__ = classdict._member_names - else: - del classdict['__order__'] - if pyver < 3.0: - __order__ = __order__.replace(',', ' ').split() - aliases = [name for name in members if name not in __order__] - __order__ += aliases - - # check for illegal enum names (any others?) - invalid_names = set(members) & set(['mro']) - if invalid_names: - raise ValueError('Invalid enum member name(s): %s' % ( - ', '.join(invalid_names), )) - - # create our new Enum type - enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict) - enum_class._member_names_ = [] # names in random order - if OrderedDict is not None: - enum_class._member_map_ = OrderedDict() - else: - enum_class._member_map_ = {} # name->value map - enum_class._member_type_ = member_type - - # Reverse value->name map for hashable values. 
- enum_class._value2member_map_ = {} - - # instantiate them, checking for duplicates as we go - # we instantiate first instead of checking for duplicates first in case - # a custom __new__ is doing something funky with the values -- such as - # auto-numbering ;) - if __new__ is None: - __new__ = enum_class.__new__ - for member_name in __order__: - value = members[member_name] - if not isinstance(value, tuple): - args = (value, ) - else: - args = value - if member_type is tuple: # special case for tuple enums - args = (args, ) # wrap it one more time - if not use_args or not args: - enum_member = __new__(enum_class) - if not hasattr(enum_member, '_value_'): - enum_member._value_ = value - else: - enum_member = __new__(enum_class, *args) - if not hasattr(enum_member, '_value_'): - enum_member._value_ = member_type(*args) - value = enum_member._value_ - enum_member._name_ = member_name - enum_member.__objclass__ = enum_class - enum_member.__init__(*args) - # If another member with the same value was already defined, the - # new member becomes an alias to the existing one. - for name, canonical_member in enum_class._member_map_.items(): - if canonical_member.value == enum_member._value_: - enum_member = canonical_member - break - else: - # Aliases don't appear in member names (only in __members__). - enum_class._member_names_.append(member_name) - enum_class._member_map_[member_name] = enum_member - try: - # This may fail if value is not hashable. We can't add the value - # to the map, and by-value lookups for this value will be - # linear. - enum_class._value2member_map_[value] = enum_member - except TypeError: - pass - - - # If a custom type is mixed into the Enum, and it does not know how - # to pickle itself, pickle.dumps will succeed but pickle.loads will - # fail. Rather than have the error show up later and possibly far - # from the source, sabotage the pickle protocol for this class so - # that pickle.dumps also fails. - # - # However, if the new class implements its own __reduce_ex__, do not - # sabotage -- it's on them to make sure it works correctly. We use - # __reduce_ex__ instead of any of the others as it is preferred by - # pickle over __reduce__, and it handles all pickle protocols. 
- unpicklable = False - if '__reduce_ex__' not in classdict: - if member_type is not object: - methods = ('__getnewargs_ex__', '__getnewargs__', - '__reduce_ex__', '__reduce__') - if not any(m in member_type.__dict__ for m in methods): - _make_class_unpicklable(enum_class) - unpicklable = True - - - # double check that repr and friends are not the mixin's or various - # things break (such as pickle) - for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'): - class_method = getattr(enum_class, name) - obj_method = getattr(member_type, name, None) - enum_method = getattr(first_enum, name, None) - if name not in classdict and class_method is not enum_method: - if name == '__reduce_ex__' and unpicklable: - continue - setattr(enum_class, name, enum_method) - - # method resolution and int's are not playing nice - # Python's less than 2.6 use __cmp__ - - if pyver < 2.6: - - if issubclass(enum_class, int): - setattr(enum_class, '__cmp__', getattr(int, '__cmp__')) - - elif pyver < 3.0: - - if issubclass(enum_class, int): - for method in ( - '__le__', - '__lt__', - '__gt__', - '__ge__', - '__eq__', - '__ne__', - '__hash__', - ): - setattr(enum_class, method, getattr(int, method)) - - # replace any other __new__ with our own (as long as Enum is not None, - # anyway) -- again, this is to support pickle - if Enum is not None: - # if the user defined their own __new__, save it before it gets - # clobbered in case they subclass later - if save_new: - setattr(enum_class, '__member_new__', enum_class.__dict__['__new__']) - setattr(enum_class, '__new__', Enum.__dict__['__new__']) - return enum_class - - def __call__(cls, value, names=None, module=None, type=None): - """Either returns an existing member, or creates a new enum class. - - This method is used both when an enum class is given a value to match - to an enumeration member (i.e. Color(3)) and for the functional API - (i.e. Color = Enum('Color', names='red green blue')). - - When used for the functional API: `module`, if set, will be stored in - the new class' __module__ attribute; `type`, if set, will be mixed in - as the first base class. - - Note: if `module` is not set this routine will attempt to discover the - calling module by walking the frame stack; if this is unsuccessful - the resulting class will not be pickleable. - - """ - if names is None: # simple value lookup - return cls.__new__(cls, value) - # otherwise, functional API: we're creating a new Enum type - return cls._create_(value, names, module=module, type=type) - - def __contains__(cls, member): - return isinstance(member, cls) and member.name in cls._member_map_ - - def __delattr__(cls, attr): - # nicer error message when someone tries to delete an attribute - # (see issue19025). - if attr in cls._member_map_: - raise AttributeError( - "%s: cannot delete Enum member." % cls.__name__) - super(EnumMeta, cls).__delattr__(attr) - - def __dir__(self): - return (['__class__', '__doc__', '__members__', '__module__'] + - self._member_names_) - - @property - def __members__(cls): - """Returns a mapping of member name->value. - - This mapping lists all enum members, including aliases. Note that this - is a copy of the internal mapping. 
- - """ - return cls._member_map_.copy() - - def __getattr__(cls, name): - """Return the enum member matching `name` - - We use __getattr__ instead of descriptors or inserting into the enum - class' __dict__ in order to support `name` and `value` being both - properties for enum members (which live in the class' __dict__) and - enum members themselves. - - """ - if _is_dunder(name): - raise AttributeError(name) - try: - return cls._member_map_[name] - except KeyError: - raise AttributeError(name) - - def __getitem__(cls, name): - return cls._member_map_[name] - - def __iter__(cls): - return (cls._member_map_[name] for name in cls._member_names_) - - def __reversed__(cls): - return (cls._member_map_[name] for name in reversed(cls._member_names_)) - - def __len__(cls): - return len(cls._member_names_) - - def __repr__(cls): - return "" % cls.__name__ - - def __setattr__(cls, name, value): - """Block attempts to reassign Enum members. - - A simple assignment to the class namespace only changes one of the - several possible ways to get an Enum member from the Enum class, - resulting in an inconsistent Enumeration. - - """ - member_map = cls.__dict__.get('_member_map_', {}) - if name in member_map: - raise AttributeError('Cannot reassign members.') - super(EnumMeta, cls).__setattr__(name, value) - - def _create_(cls, class_name, names=None, module=None, type=None): - """Convenience method to create a new Enum class. - - `names` can be: - - * A string containing member names, separated either with spaces or - commas. Values are auto-numbered from 1. - * An iterable of member names. Values are auto-numbered from 1. - * An iterable of (member name, value) pairs. - * A mapping of member name -> value. - - """ - if pyver < 3.0: - # if class_name is unicode, attempt a conversion to ASCII - if isinstance(class_name, unicode): - try: - class_name = class_name.encode('ascii') - except UnicodeEncodeError: - raise TypeError('%r is not representable in ASCII' % class_name) - metacls = cls.__class__ - if type is None: - bases = (cls, ) - else: - bases = (type, cls) - classdict = metacls.__prepare__(class_name, bases) - __order__ = [] - - # special processing needed for names? - if isinstance(names, basestring): - names = names.replace(',', ' ').split() - if isinstance(names, (tuple, list)) and isinstance(names[0], basestring): - names = [(e, i+1) for (i, e) in enumerate(names)] - - # Here, names is either an iterable of (name, value) or a mapping. - for item in names: - if isinstance(item, basestring): - member_name, member_value = item, names[item] - else: - member_name, member_value = item - classdict[member_name] = member_value - __order__.append(member_name) - # only set __order__ in classdict if name/value was not from a mapping - if not isinstance(item, basestring): - classdict['__order__'] = ' '.join(__order__) - enum_class = metacls.__new__(metacls, class_name, bases, classdict) - - # TODO: replace the frame hack if a blessed way to know the calling - # module is ever developed - if module is None: - try: - module = _sys._getframe(2).f_globals['__name__'] - except (AttributeError, ValueError): - pass - if module is None: - _make_class_unpicklable(enum_class) - else: - enum_class.__module__ = module - - return enum_class - - @staticmethod - def _get_mixins_(bases): - """Returns the type for creating enum members, and the first inherited - enum class. 
- - bases: the tuple of bases that was given to __new__ - - """ - if not bases or Enum is None: - return object, Enum - - - # double check that we are not subclassing a class with existing - # enumeration members; while we're at it, see if any other data - # type has been mixed in so we can use the correct __new__ - member_type = first_enum = None - for base in bases: - if (base is not Enum and - issubclass(base, Enum) and - base._member_names_): - raise TypeError("Cannot extend enumerations") - # base is now the last base in bases - if not issubclass(base, Enum): - raise TypeError("new enumerations must be created as " - "`ClassName([mixin_type,] enum_type)`") - - # get correct mix-in type (either mix-in type of Enum subclass, or - # first base if last base is Enum) - if not issubclass(bases[0], Enum): - member_type = bases[0] # first data type - first_enum = bases[-1] # enum type - else: - for base in bases[0].__mro__: - # most common: (IntEnum, int, Enum, object) - # possible: (, , - # , , - # ) - if issubclass(base, Enum): - if first_enum is None: - first_enum = base - else: - if member_type is None: - member_type = base - - return member_type, first_enum - - if pyver < 3.0: - @staticmethod - def _find_new_(classdict, member_type, first_enum): - """Returns the __new__ to be used for creating the enum members. - - classdict: the class dictionary given to __new__ - member_type: the data type whose __new__ will be used by default - first_enum: enumeration to check for an overriding __new__ - - """ - # now find the correct __new__, checking to see of one was defined - # by the user; also check earlier enum classes in case a __new__ was - # saved as __member_new__ - __new__ = classdict.get('__new__', None) - if __new__: - return None, True, True # __new__, save_new, use_args - - N__new__ = getattr(None, '__new__') - O__new__ = getattr(object, '__new__') - if Enum is None: - E__new__ = N__new__ - else: - E__new__ = Enum.__dict__['__new__'] - # check all possibles for __member_new__ before falling back to - # __new__ - for method in ('__member_new__', '__new__'): - for possible in (member_type, first_enum): - try: - target = possible.__dict__[method] - except (AttributeError, KeyError): - target = getattr(possible, method, None) - if target not in [ - None, - N__new__, - O__new__, - E__new__, - ]: - if method == '__member_new__': - classdict['__new__'] = target - return None, False, True - if isinstance(target, staticmethod): - target = target.__get__(member_type) - __new__ = target - break - if __new__ is not None: - break - else: - __new__ = object.__new__ - - # if a non-object.__new__ is used then whatever value/tuple was - # assigned to the enum member name will be passed to __new__ and to the - # new enum member's __init__ - if __new__ is object.__new__: - use_args = False - else: - use_args = True - - return __new__, False, use_args - else: - @staticmethod - def _find_new_(classdict, member_type, first_enum): - """Returns the __new__ to be used for creating the enum members. - - classdict: the class dictionary given to __new__ - member_type: the data type whose __new__ will be used by default - first_enum: enumeration to check for an overriding __new__ - - """ - # now find the correct __new__, checking to see of one was defined - # by the user; also check earlier enum classes in case a __new__ was - # saved as __member_new__ - __new__ = classdict.get('__new__', None) - - # should __new__ be saved as __member_new__ later? 
- save_new = __new__ is not None - - if __new__ is None: - # check all possibles for __member_new__ before falling back to - # __new__ - for method in ('__member_new__', '__new__'): - for possible in (member_type, first_enum): - target = getattr(possible, method, None) - if target not in ( - None, - None.__new__, - object.__new__, - Enum.__new__, - ): - __new__ = target - break - if __new__ is not None: - break - else: - __new__ = object.__new__ - - # if a non-object.__new__ is used then whatever value/tuple was - # assigned to the enum member name will be passed to __new__ and to the - # new enum member's __init__ - if __new__ is object.__new__: - use_args = False - else: - use_args = True - - return __new__, save_new, use_args - - -######################################################## -# In order to support Python 2 and 3 with a single -# codebase we have to create the Enum methods separately -# and then use the `type(name, bases, dict)` method to -# create the class. -######################################################## -temp_enum_dict = {} -temp_enum_dict['__doc__'] = "Generic enumeration.\n\n Derive from this class to define new enumerations.\n\n" - -def __new__(cls, value): - # all enum instances are actually created during class construction - # without calling this method; this method is called by the metaclass' - # __call__ (i.e. Color(3) ), and by pickle - if type(value) is cls: - # For lookups like Color(Color.red) - value = value.value - #return value - # by-value search for a matching enum member - # see if it's in the reverse mapping (for hashable values) - try: - if value in cls._value2member_map_: - return cls._value2member_map_[value] - except TypeError: - # not there, now do long search -- O(n) behavior - for member in cls._member_map_.values(): - if member.value == value: - return member - raise ValueError("%s is not a valid %s" % (value, cls.__name__)) -temp_enum_dict['__new__'] = __new__ -del __new__ - -def __repr__(self): - return "<%s.%s: %r>" % ( - self.__class__.__name__, self._name_, self._value_) -temp_enum_dict['__repr__'] = __repr__ -del __repr__ - -def __str__(self): - return "%s.%s" % (self.__class__.__name__, self._name_) -temp_enum_dict['__str__'] = __str__ -del __str__ - -def __dir__(self): - added_behavior = [ - m - for cls in self.__class__.mro() - for m in cls.__dict__ - if m[0] != '_' - ] - return (['__class__', '__doc__', '__module__', ] + added_behavior) -temp_enum_dict['__dir__'] = __dir__ -del __dir__ - -def __format__(self, format_spec): - # mixed-in Enums should use the mixed-in type's __format__, otherwise - # we can get strange results with the Enum name showing up instead of - # the value - - # pure Enum branch - if self._member_type_ is object: - cls = str - val = str(self) - # mix-in branch - else: - cls = self._member_type_ - val = self.value - return cls.__format__(val, format_spec) -temp_enum_dict['__format__'] = __format__ -del __format__ - - -#################################### -# Python's less than 2.6 use __cmp__ - -if pyver < 2.6: - - def __cmp__(self, other): - if type(other) is self.__class__: - if self is other: - return 0 - return -1 - return NotImplemented - raise TypeError("unorderable types: %s() and %s()" % (self.__class__.__name__, other.__class__.__name__)) - temp_enum_dict['__cmp__'] = __cmp__ - del __cmp__ - -else: - - def __le__(self, other): - raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__)) - temp_enum_dict['__le__'] = __le__ - del __le__ - - def 
__lt__(self, other): - raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__)) - temp_enum_dict['__lt__'] = __lt__ - del __lt__ - - def __ge__(self, other): - raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__)) - temp_enum_dict['__ge__'] = __ge__ - del __ge__ - - def __gt__(self, other): - raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__)) - temp_enum_dict['__gt__'] = __gt__ - del __gt__ - - -def __eq__(self, other): - if type(other) is self.__class__: - return self is other - return NotImplemented -temp_enum_dict['__eq__'] = __eq__ -del __eq__ - -def __ne__(self, other): - if type(other) is self.__class__: - return self is not other - return NotImplemented -temp_enum_dict['__ne__'] = __ne__ -del __ne__ - -def __hash__(self): - return hash(self._name_) -temp_enum_dict['__hash__'] = __hash__ -del __hash__ - -def __reduce_ex__(self, proto): - return self.__class__, (self._value_, ) -temp_enum_dict['__reduce_ex__'] = __reduce_ex__ -del __reduce_ex__ - -# _RouteClassAttributeToGetattr is used to provide access to the `name` -# and `value` properties of enum members while keeping some measure of -# protection from modification, while still allowing for an enumeration -# to have members named `name` and `value`. This works because enumeration -# members are not set directly on the enum class -- __getattr__ is -# used to look them up. - -@_RouteClassAttributeToGetattr -def name(self): - return self._name_ -temp_enum_dict['name'] = name -del name - -@_RouteClassAttributeToGetattr -def value(self): - return self._value_ -temp_enum_dict['value'] = value -del value - -Enum = EnumMeta('Enum', (object, ), temp_enum_dict) -del temp_enum_dict - -# Enum has now been created -########################### - -class IntEnum(int, Enum): - """Enum where members are also (and must be) ints""" - - -def unique(enumeration): - """Class decorator that ensures only unique members exist in an enumeration.""" - duplicates = [] - for name, member in enumeration.__members__.items(): - if name != member.name: - duplicates.append((name, member.name)) - if duplicates: - duplicate_names = ', '.join( - ["%s -> %s" % (alias, name) for (alias, name) in duplicates] - ) - raise ValueError('duplicate names found in %r: %s' % - (enumeration, duplicate_names) - ) - return enumeration diff --git a/lib/enum34/test_enum.py b/lib/enum34/test_enum.py deleted file mode 100644 index d7a9794215..0000000000 --- a/lib/enum34/test_enum.py +++ /dev/null @@ -1,1690 +0,0 @@ -import enum -import sys -import unittest -from enum import Enum, IntEnum, unique, EnumMeta -from pickle import dumps, loads, PicklingError, HIGHEST_PROTOCOL - -pyver = float('%s.%s' % sys.version_info[:2]) - -try: - any -except NameError: - def any(iterable): - for element in iterable: - if element: - return True - return False - -try: - unicode -except NameError: - unicode = str - -try: - from collections import OrderedDict -except ImportError: - OrderedDict = None - -# for pickle tests -try: - class Stooges(Enum): - LARRY = 1 - CURLY = 2 - MOE = 3 -except Exception: - Stooges = sys.exc_info()[1] - -try: - class IntStooges(int, Enum): - LARRY = 1 - CURLY = 2 - MOE = 3 -except Exception: - IntStooges = sys.exc_info()[1] - -try: - class FloatStooges(float, Enum): - LARRY = 1.39 - CURLY = 2.72 - MOE = 3.142596 -except Exception: - FloatStooges = sys.exc_info()[1] - -# for pickle test and subclass tests -try: - class 
StrEnum(str, Enum): - 'accepts only string values' - class Name(StrEnum): - BDFL = 'Guido van Rossum' - FLUFL = 'Barry Warsaw' -except Exception: - Name = sys.exc_info()[1] - -try: - Question = Enum('Question', 'who what when where why', module=__name__) -except Exception: - Question = sys.exc_info()[1] - -try: - Answer = Enum('Answer', 'him this then there because') -except Exception: - Answer = sys.exc_info()[1] - -try: - Theory = Enum('Theory', 'rule law supposition', qualname='spanish_inquisition') -except Exception: - Theory = sys.exc_info()[1] - -# for doctests -try: - class Fruit(Enum): - tomato = 1 - banana = 2 - cherry = 3 -except Exception: - pass - -def test_pickle_dump_load(assertion, source, target=None, - protocol=(0, HIGHEST_PROTOCOL)): - start, stop = protocol - failures = [] - for protocol in range(start, stop+1): - try: - if target is None: - assertion(loads(dumps(source, protocol=protocol)) is source) - else: - assertion(loads(dumps(source, protocol=protocol)), target) - except Exception: - exc, tb = sys.exc_info()[1:] - failures.append('%2d: %s' %(protocol, exc)) - if failures: - raise ValueError('Failed with protocols: %s' % ', '.join(failures)) - -def test_pickle_exception(assertion, exception, obj, - protocol=(0, HIGHEST_PROTOCOL)): - start, stop = protocol - failures = [] - for protocol in range(start, stop+1): - try: - assertion(exception, dumps, obj, protocol=protocol) - except Exception: - exc = sys.exc_info()[1] - failures.append('%d: %s %s' % (protocol, exc.__class__.__name__, exc)) - if failures: - raise ValueError('Failed with protocols: %s' % ', '.join(failures)) - - -class TestHelpers(unittest.TestCase): - # _is_descriptor, _is_sunder, _is_dunder - - def test_is_descriptor(self): - class foo: - pass - for attr in ('__get__','__set__','__delete__'): - obj = foo() - self.assertFalse(enum._is_descriptor(obj)) - setattr(obj, attr, 1) - self.assertTrue(enum._is_descriptor(obj)) - - def test_is_sunder(self): - for s in ('_a_', '_aa_'): - self.assertTrue(enum._is_sunder(s)) - - for s in ('a', 'a_', '_a', '__a', 'a__', '__a__', '_a__', '__a_', '_', - '__', '___', '____', '_____',): - self.assertFalse(enum._is_sunder(s)) - - def test_is_dunder(self): - for s in ('__a__', '__aa__'): - self.assertTrue(enum._is_dunder(s)) - for s in ('a', 'a_', '_a', '__a', 'a__', '_a_', '_a__', '__a_', '_', - '__', '___', '____', '_____',): - self.assertFalse(enum._is_dunder(s)) - - -class TestEnum(unittest.TestCase): - def setUp(self): - class Season(Enum): - SPRING = 1 - SUMMER = 2 - AUTUMN = 3 - WINTER = 4 - self.Season = Season - - class Konstants(float, Enum): - E = 2.7182818 - PI = 3.1415926 - TAU = 2 * PI - self.Konstants = Konstants - - class Grades(IntEnum): - A = 5 - B = 4 - C = 3 - D = 2 - F = 0 - self.Grades = Grades - - class Directional(str, Enum): - EAST = 'east' - WEST = 'west' - NORTH = 'north' - SOUTH = 'south' - self.Directional = Directional - - from datetime import date - class Holiday(date, Enum): - NEW_YEAR = 2013, 1, 1 - IDES_OF_MARCH = 2013, 3, 15 - self.Holiday = Holiday - - if pyver >= 2.6: # cannot specify custom `dir` on previous versions - def test_dir_on_class(self): - Season = self.Season - self.assertEqual( - set(dir(Season)), - set(['__class__', '__doc__', '__members__', '__module__', - 'SPRING', 'SUMMER', 'AUTUMN', 'WINTER']), - ) - - def test_dir_on_item(self): - Season = self.Season - self.assertEqual( - set(dir(Season.WINTER)), - set(['__class__', '__doc__', '__module__', 'name', 'value']), - ) - - def test_dir_on_sub_with_behavior_on_super(self): 
- # see issue22506 - class SuperEnum(Enum): - def invisible(self): - return "did you see me?" - class SubEnum(SuperEnum): - sample = 5 - self.assertEqual( - set(dir(SubEnum.sample)), - set(['__class__', '__doc__', '__module__', 'name', 'value', 'invisible']), - ) - - if pyver >= 2.7: # OrderedDict first available here - def test_members_is_ordereddict_if_ordered(self): - class Ordered(Enum): - __order__ = 'first second third' - first = 'bippity' - second = 'boppity' - third = 'boo' - self.assertTrue(type(Ordered.__members__) is OrderedDict) - - def test_members_is_ordereddict_if_not_ordered(self): - class Unordered(Enum): - this = 'that' - these = 'those' - self.assertTrue(type(Unordered.__members__) is OrderedDict) - - if pyver >= 3.0: # all objects are ordered in Python 2.x - def test_members_is_always_ordered(self): - class AlwaysOrdered(Enum): - first = 1 - second = 2 - third = 3 - self.assertTrue(type(AlwaysOrdered.__members__) is OrderedDict) - - def test_comparisons(self): - def bad_compare(): - Season.SPRING > 4 - Season = self.Season - self.assertNotEqual(Season.SPRING, 1) - self.assertRaises(TypeError, bad_compare) - - class Part(Enum): - SPRING = 1 - CLIP = 2 - BARREL = 3 - - self.assertNotEqual(Season.SPRING, Part.SPRING) - def bad_compare(): - Season.SPRING < Part.CLIP - self.assertRaises(TypeError, bad_compare) - - def test_enum_in_enum_out(self): - Season = self.Season - self.assertTrue(Season(Season.WINTER) is Season.WINTER) - - def test_enum_value(self): - Season = self.Season - self.assertEqual(Season.SPRING.value, 1) - - def test_intenum_value(self): - self.assertEqual(IntStooges.CURLY.value, 2) - - def test_enum(self): - Season = self.Season - lst = list(Season) - self.assertEqual(len(lst), len(Season)) - self.assertEqual(len(Season), 4, Season) - self.assertEqual( - [Season.SPRING, Season.SUMMER, Season.AUTUMN, Season.WINTER], lst) - - for i, season in enumerate('SPRING SUMMER AUTUMN WINTER'.split()): - i += 1 - e = Season(i) - self.assertEqual(e, getattr(Season, season)) - self.assertEqual(e.value, i) - self.assertNotEqual(e, i) - self.assertEqual(e.name, season) - self.assertTrue(e in Season) - self.assertTrue(type(e) is Season) - self.assertTrue(isinstance(e, Season)) - self.assertEqual(str(e), 'Season.' 
+ season) - self.assertEqual( - repr(e), - '' % (season, i), - ) - - def test_value_name(self): - Season = self.Season - self.assertEqual(Season.SPRING.name, 'SPRING') - self.assertEqual(Season.SPRING.value, 1) - def set_name(obj, new_value): - obj.name = new_value - def set_value(obj, new_value): - obj.value = new_value - self.assertRaises(AttributeError, set_name, Season.SPRING, 'invierno', ) - self.assertRaises(AttributeError, set_value, Season.SPRING, 2) - - def test_attribute_deletion(self): - class Season(Enum): - SPRING = 1 - SUMMER = 2 - AUTUMN = 3 - WINTER = 4 - - def spam(cls): - pass - - self.assertTrue(hasattr(Season, 'spam')) - del Season.spam - self.assertFalse(hasattr(Season, 'spam')) - - self.assertRaises(AttributeError, delattr, Season, 'SPRING') - self.assertRaises(AttributeError, delattr, Season, 'DRY') - self.assertRaises(AttributeError, delattr, Season.SPRING, 'name') - - def test_invalid_names(self): - def create_bad_class_1(): - class Wrong(Enum): - mro = 9 - def create_bad_class_2(): - class Wrong(Enum): - _reserved_ = 3 - self.assertRaises(ValueError, create_bad_class_1) - self.assertRaises(ValueError, create_bad_class_2) - - def test_contains(self): - Season = self.Season - self.assertTrue(Season.AUTUMN in Season) - self.assertTrue(3 not in Season) - - val = Season(3) - self.assertTrue(val in Season) - - class OtherEnum(Enum): - one = 1; two = 2 - self.assertTrue(OtherEnum.two not in Season) - - if pyver >= 2.6: # when `format` came into being - - def test_format_enum(self): - Season = self.Season - self.assertEqual('{0}'.format(Season.SPRING), - '{0}'.format(str(Season.SPRING))) - self.assertEqual( '{0:}'.format(Season.SPRING), - '{0:}'.format(str(Season.SPRING))) - self.assertEqual('{0:20}'.format(Season.SPRING), - '{0:20}'.format(str(Season.SPRING))) - self.assertEqual('{0:^20}'.format(Season.SPRING), - '{0:^20}'.format(str(Season.SPRING))) - self.assertEqual('{0:>20}'.format(Season.SPRING), - '{0:>20}'.format(str(Season.SPRING))) - self.assertEqual('{0:<20}'.format(Season.SPRING), - '{0:<20}'.format(str(Season.SPRING))) - - def test_format_enum_custom(self): - class TestFloat(float, Enum): - one = 1.0 - two = 2.0 - def __format__(self, spec): - return 'TestFloat success!' 
- self.assertEqual('{0}'.format(TestFloat.one), 'TestFloat success!') - - def assertFormatIsValue(self, spec, member): - self.assertEqual(spec.format(member), spec.format(member.value)) - - def test_format_enum_date(self): - Holiday = self.Holiday - self.assertFormatIsValue('{0}', Holiday.IDES_OF_MARCH) - self.assertFormatIsValue('{0:}', Holiday.IDES_OF_MARCH) - self.assertFormatIsValue('{0:20}', Holiday.IDES_OF_MARCH) - self.assertFormatIsValue('{0:^20}', Holiday.IDES_OF_MARCH) - self.assertFormatIsValue('{0:>20}', Holiday.IDES_OF_MARCH) - self.assertFormatIsValue('{0:<20}', Holiday.IDES_OF_MARCH) - self.assertFormatIsValue('{0:%Y %m}', Holiday.IDES_OF_MARCH) - self.assertFormatIsValue('{0:%Y %m %M:00}', Holiday.IDES_OF_MARCH) - - def test_format_enum_float(self): - Konstants = self.Konstants - self.assertFormatIsValue('{0}', Konstants.TAU) - self.assertFormatIsValue('{0:}', Konstants.TAU) - self.assertFormatIsValue('{0:20}', Konstants.TAU) - self.assertFormatIsValue('{0:^20}', Konstants.TAU) - self.assertFormatIsValue('{0:>20}', Konstants.TAU) - self.assertFormatIsValue('{0:<20}', Konstants.TAU) - self.assertFormatIsValue('{0:n}', Konstants.TAU) - self.assertFormatIsValue('{0:5.2}', Konstants.TAU) - self.assertFormatIsValue('{0:f}', Konstants.TAU) - - def test_format_enum_int(self): - Grades = self.Grades - self.assertFormatIsValue('{0}', Grades.C) - self.assertFormatIsValue('{0:}', Grades.C) - self.assertFormatIsValue('{0:20}', Grades.C) - self.assertFormatIsValue('{0:^20}', Grades.C) - self.assertFormatIsValue('{0:>20}', Grades.C) - self.assertFormatIsValue('{0:<20}', Grades.C) - self.assertFormatIsValue('{0:+}', Grades.C) - self.assertFormatIsValue('{0:08X}', Grades.C) - self.assertFormatIsValue('{0:b}', Grades.C) - - def test_format_enum_str(self): - Directional = self.Directional - self.assertFormatIsValue('{0}', Directional.WEST) - self.assertFormatIsValue('{0:}', Directional.WEST) - self.assertFormatIsValue('{0:20}', Directional.WEST) - self.assertFormatIsValue('{0:^20}', Directional.WEST) - self.assertFormatIsValue('{0:>20}', Directional.WEST) - self.assertFormatIsValue('{0:<20}', Directional.WEST) - - def test_hash(self): - Season = self.Season - dates = {} - dates[Season.WINTER] = '1225' - dates[Season.SPRING] = '0315' - dates[Season.SUMMER] = '0704' - dates[Season.AUTUMN] = '1031' - self.assertEqual(dates[Season.AUTUMN], '1031') - - def test_enum_duplicates(self): - __order__ = "SPRING SUMMER AUTUMN WINTER" - class Season(Enum): - SPRING = 1 - SUMMER = 2 - AUTUMN = FALL = 3 - WINTER = 4 - ANOTHER_SPRING = 1 - lst = list(Season) - self.assertEqual( - lst, - [Season.SPRING, Season.SUMMER, - Season.AUTUMN, Season.WINTER, - ]) - self.assertTrue(Season.FALL is Season.AUTUMN) - self.assertEqual(Season.FALL.value, 3) - self.assertEqual(Season.AUTUMN.value, 3) - self.assertTrue(Season(3) is Season.AUTUMN) - self.assertTrue(Season(1) is Season.SPRING) - self.assertEqual(Season.FALL.name, 'AUTUMN') - self.assertEqual( - set([k for k,v in Season.__members__.items() if v.name != k]), - set(['FALL', 'ANOTHER_SPRING']), - ) - - if pyver >= 3.0: - cls = vars() - result = {'Enum':Enum} - exec("""def test_duplicate_name(self): - with self.assertRaises(TypeError): - class Color(Enum): - red = 1 - green = 2 - blue = 3 - red = 4 - - with self.assertRaises(TypeError): - class Color(Enum): - red = 1 - green = 2 - blue = 3 - def red(self): - return 'red' - - with self.assertRaises(TypeError): - class Color(Enum): - @property - - def red(self): - return 'redder' - red = 1 - green = 2 - blue = 3""", - 
result) - cls['test_duplicate_name'] = result['test_duplicate_name'] - - def test_enum_with_value_name(self): - class Huh(Enum): - name = 1 - value = 2 - self.assertEqual( - list(Huh), - [Huh.name, Huh.value], - ) - self.assertTrue(type(Huh.name) is Huh) - self.assertEqual(Huh.name.name, 'name') - self.assertEqual(Huh.name.value, 1) - - def test_intenum_from_scratch(self): - class phy(int, Enum): - pi = 3 - tau = 2 * pi - self.assertTrue(phy.pi < phy.tau) - - def test_intenum_inherited(self): - class IntEnum(int, Enum): - pass - class phy(IntEnum): - pi = 3 - tau = 2 * pi - self.assertTrue(phy.pi < phy.tau) - - def test_floatenum_from_scratch(self): - class phy(float, Enum): - pi = 3.1415926 - tau = 2 * pi - self.assertTrue(phy.pi < phy.tau) - - def test_floatenum_inherited(self): - class FloatEnum(float, Enum): - pass - class phy(FloatEnum): - pi = 3.1415926 - tau = 2 * pi - self.assertTrue(phy.pi < phy.tau) - - def test_strenum_from_scratch(self): - class phy(str, Enum): - pi = 'Pi' - tau = 'Tau' - self.assertTrue(phy.pi < phy.tau) - - def test_strenum_inherited(self): - class StrEnum(str, Enum): - pass - class phy(StrEnum): - pi = 'Pi' - tau = 'Tau' - self.assertTrue(phy.pi < phy.tau) - - def test_intenum(self): - class WeekDay(IntEnum): - SUNDAY = 1 - MONDAY = 2 - TUESDAY = 3 - WEDNESDAY = 4 - THURSDAY = 5 - FRIDAY = 6 - SATURDAY = 7 - - self.assertEqual(['a', 'b', 'c'][WeekDay.MONDAY], 'c') - self.assertEqual([i for i in range(WeekDay.TUESDAY)], [0, 1, 2]) - - lst = list(WeekDay) - self.assertEqual(len(lst), len(WeekDay)) - self.assertEqual(len(WeekDay), 7) - target = 'SUNDAY MONDAY TUESDAY WEDNESDAY THURSDAY FRIDAY SATURDAY' - target = target.split() - for i, weekday in enumerate(target): - i += 1 - e = WeekDay(i) - self.assertEqual(e, i) - self.assertEqual(int(e), i) - self.assertEqual(e.name, weekday) - self.assertTrue(e in WeekDay) - self.assertEqual(lst.index(e)+1, i) - self.assertTrue(0 < e < 8) - self.assertTrue(type(e) is WeekDay) - self.assertTrue(isinstance(e, int)) - self.assertTrue(isinstance(e, Enum)) - - def test_intenum_duplicates(self): - class WeekDay(IntEnum): - __order__ = 'SUNDAY MONDAY TUESDAY WEDNESDAY THURSDAY FRIDAY SATURDAY' - SUNDAY = 1 - MONDAY = 2 - TUESDAY = TEUSDAY = 3 - WEDNESDAY = 4 - THURSDAY = 5 - FRIDAY = 6 - SATURDAY = 7 - self.assertTrue(WeekDay.TEUSDAY is WeekDay.TUESDAY) - self.assertEqual(WeekDay(3).name, 'TUESDAY') - self.assertEqual([k for k,v in WeekDay.__members__.items() - if v.name != k], ['TEUSDAY', ]) - - def test_pickle_enum(self): - if isinstance(Stooges, Exception): - raise Stooges - test_pickle_dump_load(self.assertTrue, Stooges.CURLY) - test_pickle_dump_load(self.assertTrue, Stooges) - - def test_pickle_int(self): - if isinstance(IntStooges, Exception): - raise IntStooges - test_pickle_dump_load(self.assertTrue, IntStooges.CURLY) - test_pickle_dump_load(self.assertTrue, IntStooges) - - def test_pickle_float(self): - if isinstance(FloatStooges, Exception): - raise FloatStooges - test_pickle_dump_load(self.assertTrue, FloatStooges.CURLY) - test_pickle_dump_load(self.assertTrue, FloatStooges) - - def test_pickle_enum_function(self): - if isinstance(Answer, Exception): - raise Answer - test_pickle_dump_load(self.assertTrue, Answer.him) - test_pickle_dump_load(self.assertTrue, Answer) - - def test_pickle_enum_function_with_module(self): - if isinstance(Question, Exception): - raise Question - test_pickle_dump_load(self.assertTrue, Question.who) - test_pickle_dump_load(self.assertTrue, Question) - - if pyver >= 3.4: - def 
test_class_nested_enum_and_pickle_protocol_four(self): - # would normally just have this directly in the class namespace - class NestedEnum(Enum): - twigs = 'common' - shiny = 'rare' - - self.__class__.NestedEnum = NestedEnum - self.NestedEnum.__qualname__ = '%s.NestedEnum' % self.__class__.__name__ - test_pickle_exception( - self.assertRaises, PicklingError, self.NestedEnum.twigs, - protocol=(0, 3)) - test_pickle_dump_load(self.assertTrue, self.NestedEnum.twigs, - protocol=(4, HIGHEST_PROTOCOL)) - - def test_exploding_pickle(self): - BadPickle = Enum('BadPickle', 'dill sweet bread-n-butter') - enum._make_class_unpicklable(BadPickle) - globals()['BadPickle'] = BadPickle - test_pickle_exception(self.assertRaises, TypeError, BadPickle.dill) - test_pickle_exception(self.assertRaises, PicklingError, BadPickle) - - def test_string_enum(self): - class SkillLevel(str, Enum): - master = 'what is the sound of one hand clapping?' - journeyman = 'why did the chicken cross the road?' - apprentice = 'knock, knock!' - self.assertEqual(SkillLevel.apprentice, 'knock, knock!') - - def test_getattr_getitem(self): - class Period(Enum): - morning = 1 - noon = 2 - evening = 3 - night = 4 - self.assertTrue(Period(2) is Period.noon) - self.assertTrue(getattr(Period, 'night') is Period.night) - self.assertTrue(Period['morning'] is Period.morning) - - def test_getattr_dunder(self): - Season = self.Season - self.assertTrue(getattr(Season, '__hash__')) - - def test_iteration_order(self): - class Season(Enum): - __order__ = 'SUMMER WINTER AUTUMN SPRING' - SUMMER = 2 - WINTER = 4 - AUTUMN = 3 - SPRING = 1 - self.assertEqual( - list(Season), - [Season.SUMMER, Season.WINTER, Season.AUTUMN, Season.SPRING], - ) - - def test_iteration_order_with_unorderable_values(self): - class Complex(Enum): - a = complex(7, 9) - b = complex(3.14, 2) - c = complex(1, -1) - d = complex(-77, 32) - self.assertEqual( - list(Complex), - [Complex.a, Complex.b, Complex.c, Complex.d], - ) - - def test_programatic_function_string(self): - SummerMonth = Enum('SummerMonth', 'june july august') - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate('june july august'.split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(int(e.value), i) - self.assertNotEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programatic_function_string_list(self): - SummerMonth = Enum('SummerMonth', ['june', 'july', 'august']) - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate('june july august'.split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(int(e.value), i) - self.assertNotEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programatic_function_iterable(self): - SummerMonth = Enum( - 'SummerMonth', - (('june', 1), ('july', 2), ('august', 3)) - ) - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate('june july 
august'.split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(int(e.value), i) - self.assertNotEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programatic_function_from_dict(self): - SummerMonth = Enum( - 'SummerMonth', - dict((('june', 1), ('july', 2), ('august', 3))) - ) - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - if pyver < 3.0: - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate('june july august'.split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(int(e.value), i) - self.assertNotEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programatic_function_type(self): - SummerMonth = Enum('SummerMonth', 'june july august', type=int) - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate('june july august'.split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programatic_function_type_from_subclass(self): - SummerMonth = IntEnum('SummerMonth', 'june july august') - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate('june july august'.split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programatic_function_unicode(self): - SummerMonth = Enum('SummerMonth', unicode('june july august')) - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate(unicode('june july august').split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(int(e.value), i) - self.assertNotEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programatic_function_unicode_list(self): - SummerMonth = Enum('SummerMonth', [unicode('june'), unicode('july'), unicode('august')]) - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate(unicode('june july august').split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(int(e.value), i) - self.assertNotEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programatic_function_unicode_iterable(self): - SummerMonth = Enum( - 'SummerMonth', - ((unicode('june'), 1), (unicode('july'), 2), (unicode('august'), 3)) - ) - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - self.assertEqual( - [SummerMonth.june, 
SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate(unicode('june july august').split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(int(e.value), i) - self.assertNotEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programatic_function_from_unicode_dict(self): - SummerMonth = Enum( - 'SummerMonth', - dict(((unicode('june'), 1), (unicode('july'), 2), (unicode('august'), 3))) - ) - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - if pyver < 3.0: - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate(unicode('june july august').split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(int(e.value), i) - self.assertNotEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programatic_function_unicode_type(self): - SummerMonth = Enum('SummerMonth', unicode('june july august'), type=int) - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate(unicode('june july august').split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programatic_function_unicode_type_from_subclass(self): - SummerMonth = IntEnum('SummerMonth', unicode('june july august')) - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate(unicode('june july august').split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(e, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_programmatic_function_unicode_class(self): - if pyver < 3.0: - class_names = unicode('SummerMonth'), 'S\xfcmm\xe9rM\xf6nth'.decode('latin1') - else: - class_names = 'SummerMonth', 'S\xfcmm\xe9rM\xf6nth' - for i, class_name in enumerate(class_names): - if pyver < 3.0 and i == 1: - self.assertRaises(TypeError, Enum, class_name, unicode('june july august')) - else: - SummerMonth = Enum(class_name, unicode('june july august')) - lst = list(SummerMonth) - self.assertEqual(len(lst), len(SummerMonth)) - self.assertEqual(len(SummerMonth), 3, SummerMonth) - self.assertEqual( - [SummerMonth.june, SummerMonth.july, SummerMonth.august], - lst, - ) - for i, month in enumerate(unicode('june july august').split()): - i += 1 - e = SummerMonth(i) - self.assertEqual(e.value, i) - self.assertEqual(e.name, month) - self.assertTrue(e in SummerMonth) - self.assertTrue(type(e) is SummerMonth) - - def test_subclassing(self): - if isinstance(Name, Exception): - raise Name - self.assertEqual(Name.BDFL, 'Guido van Rossum') - self.assertTrue(Name.BDFL, Name('Guido van Rossum')) - self.assertTrue(Name.BDFL is getattr(Name, 'BDFL')) - test_pickle_dump_load(self.assertTrue, Name.BDFL) - - def test_extending(self): - def bad_extension(): - class Color(Enum): - red = 1 - green = 2 - blue = 3 - class MoreColor(Color): - cyan = 4 - magenta = 5 - yellow = 6 - 
self.assertRaises(TypeError, bad_extension) - - def test_exclude_methods(self): - class whatever(Enum): - this = 'that' - these = 'those' - def really(self): - return 'no, not %s' % self.value - self.assertFalse(type(whatever.really) is whatever) - self.assertEqual(whatever.this.really(), 'no, not that') - - def test_wrong_inheritance_order(self): - def wrong_inherit(): - class Wrong(Enum, str): - NotHere = 'error before this point' - self.assertRaises(TypeError, wrong_inherit) - - def test_intenum_transitivity(self): - class number(IntEnum): - one = 1 - two = 2 - three = 3 - class numero(IntEnum): - uno = 1 - dos = 2 - tres = 3 - self.assertEqual(number.one, numero.uno) - self.assertEqual(number.two, numero.dos) - self.assertEqual(number.three, numero.tres) - - def test_introspection(self): - class Number(IntEnum): - one = 100 - two = 200 - self.assertTrue(Number.one._member_type_ is int) - self.assertTrue(Number._member_type_ is int) - class String(str, Enum): - yarn = 'soft' - rope = 'rough' - wire = 'hard' - self.assertTrue(String.yarn._member_type_ is str) - self.assertTrue(String._member_type_ is str) - class Plain(Enum): - vanilla = 'white' - one = 1 - self.assertTrue(Plain.vanilla._member_type_ is object) - self.assertTrue(Plain._member_type_ is object) - - def test_wrong_enum_in_call(self): - class Monochrome(Enum): - black = 0 - white = 1 - class Gender(Enum): - male = 0 - female = 1 - self.assertRaises(ValueError, Monochrome, Gender.male) - - def test_wrong_enum_in_mixed_call(self): - class Monochrome(IntEnum): - black = 0 - white = 1 - class Gender(Enum): - male = 0 - female = 1 - self.assertRaises(ValueError, Monochrome, Gender.male) - - def test_mixed_enum_in_call_1(self): - class Monochrome(IntEnum): - black = 0 - white = 1 - class Gender(IntEnum): - male = 0 - female = 1 - self.assertTrue(Monochrome(Gender.female) is Monochrome.white) - - def test_mixed_enum_in_call_2(self): - class Monochrome(Enum): - black = 0 - white = 1 - class Gender(IntEnum): - male = 0 - female = 1 - self.assertTrue(Monochrome(Gender.male) is Monochrome.black) - - def test_flufl_enum(self): - class Fluflnum(Enum): - def __int__(self): - return int(self.value) - class MailManOptions(Fluflnum): - option1 = 1 - option2 = 2 - option3 = 3 - self.assertEqual(int(MailManOptions.option1), 1) - - def test_no_such_enum_member(self): - class Color(Enum): - red = 1 - green = 2 - blue = 3 - self.assertRaises(ValueError, Color, 4) - self.assertRaises(KeyError, Color.__getitem__, 'chartreuse') - - def test_new_repr(self): - class Color(Enum): - red = 1 - green = 2 - blue = 3 - def __repr__(self): - return "don't you just love shades of %s?" % self.name - self.assertEqual( - repr(Color.blue), - "don't you just love shades of blue?", - ) - - def test_inherited_repr(self): - class MyEnum(Enum): - def __repr__(self): - return "My name is %s." 
% self.name - class MyIntEnum(int, MyEnum): - this = 1 - that = 2 - theother = 3 - self.assertEqual(repr(MyIntEnum.that), "My name is that.") - - def test_multiple_mixin_mro(self): - class auto_enum(EnumMeta): - def __new__(metacls, cls, bases, classdict): - original_dict = classdict - classdict = enum._EnumDict() - for k, v in original_dict.items(): - classdict[k] = v - temp = type(classdict)() - names = set(classdict._member_names) - i = 0 - for k in classdict._member_names: - v = classdict[k] - if v == (): - v = i - else: - i = v - i += 1 - temp[k] = v - for k, v in classdict.items(): - if k not in names: - temp[k] = v - return super(auto_enum, metacls).__new__( - metacls, cls, bases, temp) - - AutoNumberedEnum = auto_enum('AutoNumberedEnum', (Enum,), {}) - - AutoIntEnum = auto_enum('AutoIntEnum', (IntEnum,), {}) - - class TestAutoNumber(AutoNumberedEnum): - a = () - b = 3 - c = () - - class TestAutoInt(AutoIntEnum): - a = () - b = 3 - c = () - - def test_subclasses_with_getnewargs(self): - class NamedInt(int): - __qualname__ = 'NamedInt' # needed for pickle protocol 4 - def __new__(cls, *args): - _args = args - if len(args) < 1: - raise TypeError("name and value must be specified") - name, args = args[0], args[1:] - self = int.__new__(cls, *args) - self._intname = name - self._args = _args - return self - def __getnewargs__(self): - return self._args - @property - def __name__(self): - return self._intname - def __repr__(self): - # repr() is updated to include the name and type info - return "%s(%r, %s)" % (type(self).__name__, - self.__name__, - int.__repr__(self)) - def __str__(self): - # str() is unchanged, even if it relies on the repr() fallback - base = int - base_str = base.__str__ - if base_str.__objclass__ is object: - return base.__repr__(self) - return base_str(self) - # for simplicity, we only define one operator that - # propagates expressions - def __add__(self, other): - temp = int(self) + int( other) - if isinstance(self, NamedInt) and isinstance(other, NamedInt): - return NamedInt( - '(%s + %s)' % (self.__name__, other.__name__), - temp ) - else: - return temp - - class NEI(NamedInt, Enum): - __qualname__ = 'NEI' # needed for pickle protocol 4 - x = ('the-x', 1) - y = ('the-y', 2) - - self.assertTrue(NEI.__new__ is Enum.__new__) - self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)") - globals()['NamedInt'] = NamedInt - globals()['NEI'] = NEI - NI5 = NamedInt('test', 5) - self.assertEqual(NI5, 5) - test_pickle_dump_load(self.assertTrue, NI5, 5) - self.assertEqual(NEI.y.value, 2) - test_pickle_dump_load(self.assertTrue, NEI.y) - - if pyver >= 3.4: - def test_subclasses_with_getnewargs_ex(self): - class NamedInt(int): - __qualname__ = 'NamedInt' # needed for pickle protocol 4 - def __new__(cls, *args): - _args = args - if len(args) < 2: - raise TypeError("name and value must be specified") - name, args = args[0], args[1:] - self = int.__new__(cls, *args) - self._intname = name - self._args = _args - return self - def __getnewargs_ex__(self): - return self._args, {} - @property - def __name__(self): - return self._intname - def __repr__(self): - # repr() is updated to include the name and type info - return "{}({!r}, {})".format(type(self).__name__, - self.__name__, - int.__repr__(self)) - def __str__(self): - # str() is unchanged, even if it relies on the repr() fallback - base = int - base_str = base.__str__ - if base_str.__objclass__ is object: - return base.__repr__(self) - return base_str(self) - # for simplicity, we only define one operator that - 
# propagates expressions - def __add__(self, other): - temp = int(self) + int( other) - if isinstance(self, NamedInt) and isinstance(other, NamedInt): - return NamedInt( - '({0} + {1})'.format(self.__name__, other.__name__), - temp ) - else: - return temp - - class NEI(NamedInt, Enum): - __qualname__ = 'NEI' # needed for pickle protocol 4 - x = ('the-x', 1) - y = ('the-y', 2) - - - self.assertIs(NEI.__new__, Enum.__new__) - self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)") - globals()['NamedInt'] = NamedInt - globals()['NEI'] = NEI - NI5 = NamedInt('test', 5) - self.assertEqual(NI5, 5) - test_pickle_dump_load(self.assertEqual, NI5, 5, protocol=(4, HIGHEST_PROTOCOL)) - self.assertEqual(NEI.y.value, 2) - test_pickle_dump_load(self.assertTrue, NEI.y, protocol=(4, HIGHEST_PROTOCOL)) - - def test_subclasses_with_reduce(self): - class NamedInt(int): - __qualname__ = 'NamedInt' # needed for pickle protocol 4 - def __new__(cls, *args): - _args = args - if len(args) < 1: - raise TypeError("name and value must be specified") - name, args = args[0], args[1:] - self = int.__new__(cls, *args) - self._intname = name - self._args = _args - return self - def __reduce__(self): - return self.__class__, self._args - @property - def __name__(self): - return self._intname - def __repr__(self): - # repr() is updated to include the name and type info - return "%s(%r, %s)" % (type(self).__name__, - self.__name__, - int.__repr__(self)) - def __str__(self): - # str() is unchanged, even if it relies on the repr() fallback - base = int - base_str = base.__str__ - if base_str.__objclass__ is object: - return base.__repr__(self) - return base_str(self) - # for simplicity, we only define one operator that - # propagates expressions - def __add__(self, other): - temp = int(self) + int( other) - if isinstance(self, NamedInt) and isinstance(other, NamedInt): - return NamedInt( - '(%s + %s)' % (self.__name__, other.__name__), - temp ) - else: - return temp - - class NEI(NamedInt, Enum): - __qualname__ = 'NEI' # needed for pickle protocol 4 - x = ('the-x', 1) - y = ('the-y', 2) - - - self.assertTrue(NEI.__new__ is Enum.__new__) - self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)") - globals()['NamedInt'] = NamedInt - globals()['NEI'] = NEI - NI5 = NamedInt('test', 5) - self.assertEqual(NI5, 5) - test_pickle_dump_load(self.assertEqual, NI5, 5) - self.assertEqual(NEI.y.value, 2) - test_pickle_dump_load(self.assertTrue, NEI.y) - - def test_subclasses_with_reduce_ex(self): - class NamedInt(int): - __qualname__ = 'NamedInt' # needed for pickle protocol 4 - def __new__(cls, *args): - _args = args - if len(args) < 1: - raise TypeError("name and value must be specified") - name, args = args[0], args[1:] - self = int.__new__(cls, *args) - self._intname = name - self._args = _args - return self - def __reduce_ex__(self, proto): - return self.__class__, self._args - @property - def __name__(self): - return self._intname - def __repr__(self): - # repr() is updated to include the name and type info - return "%s(%r, %s)" % (type(self).__name__, - self.__name__, - int.__repr__(self)) - def __str__(self): - # str() is unchanged, even if it relies on the repr() fallback - base = int - base_str = base.__str__ - if base_str.__objclass__ is object: - return base.__repr__(self) - return base_str(self) - # for simplicity, we only define one operator that - # propagates expressions - def __add__(self, other): - temp = int(self) + int( other) - if isinstance(self, NamedInt) and isinstance(other, NamedInt): 
- return NamedInt( - '(%s + %s)' % (self.__name__, other.__name__), - temp ) - else: - return temp - - class NEI(NamedInt, Enum): - __qualname__ = 'NEI' # needed for pickle protocol 4 - x = ('the-x', 1) - y = ('the-y', 2) - - - self.assertTrue(NEI.__new__ is Enum.__new__) - self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)") - globals()['NamedInt'] = NamedInt - globals()['NEI'] = NEI - NI5 = NamedInt('test', 5) - self.assertEqual(NI5, 5) - test_pickle_dump_load(self.assertEqual, NI5, 5) - self.assertEqual(NEI.y.value, 2) - test_pickle_dump_load(self.assertTrue, NEI.y) - - def test_subclasses_without_direct_pickle_support(self): - class NamedInt(int): - __qualname__ = 'NamedInt' - def __new__(cls, *args): - _args = args - name, args = args[0], args[1:] - if len(args) == 0: - raise TypeError("name and value must be specified") - self = int.__new__(cls, *args) - self._intname = name - self._args = _args - return self - @property - def __name__(self): - return self._intname - def __repr__(self): - # repr() is updated to include the name and type info - return "%s(%r, %s)" % (type(self).__name__, - self.__name__, - int.__repr__(self)) - def __str__(self): - # str() is unchanged, even if it relies on the repr() fallback - base = int - base_str = base.__str__ - if base_str.__objclass__ is object: - return base.__repr__(self) - return base_str(self) - # for simplicity, we only define one operator that - # propagates expressions - def __add__(self, other): - temp = int(self) + int( other) - if isinstance(self, NamedInt) and isinstance(other, NamedInt): - return NamedInt( - '(%s + %s)' % (self.__name__, other.__name__), - temp ) - else: - return temp - - class NEI(NamedInt, Enum): - __qualname__ = 'NEI' - x = ('the-x', 1) - y = ('the-y', 2) - - self.assertTrue(NEI.__new__ is Enum.__new__) - self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)") - globals()['NamedInt'] = NamedInt - globals()['NEI'] = NEI - NI5 = NamedInt('test', 5) - self.assertEqual(NI5, 5) - self.assertEqual(NEI.y.value, 2) - test_pickle_exception(self.assertRaises, TypeError, NEI.x) - test_pickle_exception(self.assertRaises, PicklingError, NEI) - - def test_subclasses_without_direct_pickle_support_using_name(self): - class NamedInt(int): - __qualname__ = 'NamedInt' - def __new__(cls, *args): - _args = args - name, args = args[0], args[1:] - if len(args) == 0: - raise TypeError("name and value must be specified") - self = int.__new__(cls, *args) - self._intname = name - self._args = _args - return self - @property - def __name__(self): - return self._intname - def __repr__(self): - # repr() is updated to include the name and type info - return "%s(%r, %s)" % (type(self).__name__, - self.__name__, - int.__repr__(self)) - def __str__(self): - # str() is unchanged, even if it relies on the repr() fallback - base = int - base_str = base.__str__ - if base_str.__objclass__ is object: - return base.__repr__(self) - return base_str(self) - # for simplicity, we only define one operator that - # propagates expressions - def __add__(self, other): - temp = int(self) + int( other) - if isinstance(self, NamedInt) and isinstance(other, NamedInt): - return NamedInt( - '(%s + %s)' % (self.__name__, other.__name__), - temp ) - else: - return temp - - class NEI(NamedInt, Enum): - __qualname__ = 'NEI' - x = ('the-x', 1) - y = ('the-y', 2) - def __reduce_ex__(self, proto): - return getattr, (self.__class__, self._name_) - - self.assertTrue(NEI.__new__ is Enum.__new__) - self.assertEqual(repr(NEI.x + NEI.y), 
"NamedInt('(the-x + the-y)', 3)") - globals()['NamedInt'] = NamedInt - globals()['NEI'] = NEI - NI5 = NamedInt('test', 5) - self.assertEqual(NI5, 5) - self.assertEqual(NEI.y.value, 2) - test_pickle_dump_load(self.assertTrue, NEI.y) - test_pickle_dump_load(self.assertTrue, NEI) - - def test_tuple_subclass(self): - class SomeTuple(tuple, Enum): - __qualname__ = 'SomeTuple' - first = (1, 'for the money') - second = (2, 'for the show') - third = (3, 'for the music') - self.assertTrue(type(SomeTuple.first) is SomeTuple) - self.assertTrue(isinstance(SomeTuple.second, tuple)) - self.assertEqual(SomeTuple.third, (3, 'for the music')) - globals()['SomeTuple'] = SomeTuple - test_pickle_dump_load(self.assertTrue, SomeTuple.first) - - def test_duplicate_values_give_unique_enum_items(self): - class AutoNumber(Enum): - __order__ = 'enum_m enum_d enum_y' - enum_m = () - enum_d = () - enum_y = () - def __new__(cls): - value = len(cls.__members__) + 1 - obj = object.__new__(cls) - obj._value_ = value - return obj - def __int__(self): - return int(self._value_) - self.assertEqual(int(AutoNumber.enum_d), 2) - self.assertEqual(AutoNumber.enum_y.value, 3) - self.assertTrue(AutoNumber(1) is AutoNumber.enum_m) - self.assertEqual( - list(AutoNumber), - [AutoNumber.enum_m, AutoNumber.enum_d, AutoNumber.enum_y], - ) - - def test_inherited_new_from_enhanced_enum(self): - class AutoNumber2(Enum): - def __new__(cls): - value = len(cls.__members__) + 1 - obj = object.__new__(cls) - obj._value_ = value - return obj - def __int__(self): - return int(self._value_) - class Color(AutoNumber2): - __order__ = 'red green blue' - red = () - green = () - blue = () - self.assertEqual(len(Color), 3, "wrong number of elements: %d (should be %d)" % (len(Color), 3)) - self.assertEqual(list(Color), [Color.red, Color.green, Color.blue]) - if pyver >= 3.0: - self.assertEqual(list(map(int, Color)), [1, 2, 3]) - - def test_inherited_new_from_mixed_enum(self): - class AutoNumber3(IntEnum): - def __new__(cls): - value = len(cls.__members__) + 1 - obj = int.__new__(cls, value) - obj._value_ = value - return obj - class Color(AutoNumber3): - red = () - green = () - blue = () - self.assertEqual(len(Color), 3, "wrong number of elements: %d (should be %d)" % (len(Color), 3)) - Color.red - Color.green - Color.blue - - def test_ordered_mixin(self): - class OrderedEnum(Enum): - def __ge__(self, other): - if self.__class__ is other.__class__: - return self._value_ >= other._value_ - return NotImplemented - def __gt__(self, other): - if self.__class__ is other.__class__: - return self._value_ > other._value_ - return NotImplemented - def __le__(self, other): - if self.__class__ is other.__class__: - return self._value_ <= other._value_ - return NotImplemented - def __lt__(self, other): - if self.__class__ is other.__class__: - return self._value_ < other._value_ - return NotImplemented - class Grade(OrderedEnum): - __order__ = 'A B C D F' - A = 5 - B = 4 - C = 3 - D = 2 - F = 1 - self.assertEqual(list(Grade), [Grade.A, Grade.B, Grade.C, Grade.D, Grade.F]) - self.assertTrue(Grade.A > Grade.B) - self.assertTrue(Grade.F <= Grade.C) - self.assertTrue(Grade.D < Grade.A) - self.assertTrue(Grade.B >= Grade.B) - - def test_extending2(self): - def bad_extension(): - class Shade(Enum): - def shade(self): - print(self.name) - class Color(Shade): - red = 1 - green = 2 - blue = 3 - class MoreColor(Color): - cyan = 4 - magenta = 5 - yellow = 6 - self.assertRaises(TypeError, bad_extension) - - def test_extending3(self): - class Shade(Enum): - def shade(self): - 
return self.name - class Color(Shade): - def hex(self): - return '%s hexlified!' % self.value - class MoreColor(Color): - cyan = 4 - magenta = 5 - yellow = 6 - self.assertEqual(MoreColor.magenta.hex(), '5 hexlified!') - - def test_no_duplicates(self): - def bad_duplicates(): - class UniqueEnum(Enum): - def __init__(self, *args): - cls = self.__class__ - if any(self.value == e.value for e in cls): - a = self.name - e = cls(self.value).name - raise ValueError( - "aliases not allowed in UniqueEnum: %r --> %r" - % (a, e) - ) - class Color(UniqueEnum): - red = 1 - green = 2 - blue = 3 - class Color(UniqueEnum): - red = 1 - green = 2 - blue = 3 - grene = 2 - self.assertRaises(ValueError, bad_duplicates) - - def test_reversed(self): - self.assertEqual( - list(reversed(self.Season)), - [self.Season.WINTER, self.Season.AUTUMN, self.Season.SUMMER, - self.Season.SPRING] - ) - - def test_init(self): - class Planet(Enum): - MERCURY = (3.303e+23, 2.4397e6) - VENUS = (4.869e+24, 6.0518e6) - EARTH = (5.976e+24, 6.37814e6) - MARS = (6.421e+23, 3.3972e6) - JUPITER = (1.9e+27, 7.1492e7) - SATURN = (5.688e+26, 6.0268e7) - URANUS = (8.686e+25, 2.5559e7) - NEPTUNE = (1.024e+26, 2.4746e7) - def __init__(self, mass, radius): - self.mass = mass # in kilograms - self.radius = radius # in meters - @property - def surface_gravity(self): - # universal gravitational constant (m3 kg-1 s-2) - G = 6.67300E-11 - return G * self.mass / (self.radius * self.radius) - self.assertEqual(round(Planet.EARTH.surface_gravity, 2), 9.80) - self.assertEqual(Planet.EARTH.value, (5.976e+24, 6.37814e6)) - - def test_nonhash_value(self): - class AutoNumberInAList(Enum): - def __new__(cls): - value = [len(cls.__members__) + 1] - obj = object.__new__(cls) - obj._value_ = value - return obj - class ColorInAList(AutoNumberInAList): - __order__ = 'red green blue' - red = () - green = () - blue = () - self.assertEqual(list(ColorInAList), [ColorInAList.red, ColorInAList.green, ColorInAList.blue]) - self.assertEqual(ColorInAList.red.value, [1]) - self.assertEqual(ColorInAList([1]), ColorInAList.red) - - def test_conflicting_types_resolved_in_new(self): - class LabelledIntEnum(int, Enum): - def __new__(cls, *args): - value, label = args - obj = int.__new__(cls, value) - obj.label = label - obj._value_ = value - return obj - - class LabelledList(LabelledIntEnum): - unprocessed = (1, "Unprocessed") - payment_complete = (2, "Payment Complete") - - self.assertEqual(list(LabelledList), [LabelledList.unprocessed, LabelledList.payment_complete]) - self.assertEqual(LabelledList.unprocessed, 1) - self.assertEqual(LabelledList(1), LabelledList.unprocessed) - -class TestUnique(unittest.TestCase): - """2.4 doesn't allow class decorators, use function syntax.""" - - def test_unique_clean(self): - class Clean(Enum): - one = 1 - two = 'dos' - tres = 4.0 - unique(Clean) - class Cleaner(IntEnum): - single = 1 - double = 2 - triple = 3 - unique(Cleaner) - - def test_unique_dirty(self): - try: - class Dirty(Enum): - __order__ = 'one two tres' - one = 1 - two = 'dos' - tres = 1 - unique(Dirty) - except ValueError: - exc = sys.exc_info()[1] - message = exc.args[0] - self.assertTrue('tres -> one' in message) - - try: - class Dirtier(IntEnum): - __order__ = 'single double triple turkey' - single = 1 - double = 1 - triple = 3 - turkey = 3 - unique(Dirtier) - except ValueError: - exc = sys.exc_info()[1] - message = exc.args[0] - self.assertTrue('double -> single' in message) - self.assertTrue('turkey -> triple' in message) - - -class TestMe(unittest.TestCase): - - pass - 
-if __name__ == '__main__': - unittest.main() diff --git a/lib/enzyme/__init__.py b/lib/enzyme/__init__.py index 4ed31f4268..9a171b522e 100644 --- a/lib/enzyme/__init__.py +++ b/lib/enzyme/__init__.py @@ -9,8 +9,5 @@ from .exceptions import * from .mkv import * -class NullHandler(logging.Handler): - def emit(self, record): - pass -logging.getLogger(__name__).addHandler(NullHandler()) +logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/lib/html5lib/__init__.py b/lib/html5lib/__init__.py index 19a4b7d692..962536cba3 100644 --- a/lib/html5lib/__init__.py +++ b/lib/html5lib/__init__.py @@ -20,4 +20,6 @@ __all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder", "getTreeWalker", "serialize"] -__version__ = "0.999" + +# this has to be at the top level, see how setup.py parses this +__version__ = "0.9999999" diff --git a/lib/html5lib/constants.py b/lib/html5lib/constants.py index e7089846d5..d938e0ae66 100644 --- a/lib/html5lib/constants.py +++ b/lib/html5lib/constants.py @@ -1,292 +1,290 @@ from __future__ import absolute_import, division, unicode_literals import string -import gettext -_ = gettext.gettext EOF = None E = { "null-character": - _("Null character in input stream, replaced with U+FFFD."), + "Null character in input stream, replaced with U+FFFD.", "invalid-codepoint": - _("Invalid codepoint in stream."), + "Invalid codepoint in stream.", "incorrectly-placed-solidus": - _("Solidus (/) incorrectly placed in tag."), + "Solidus (/) incorrectly placed in tag.", "incorrect-cr-newline-entity": - _("Incorrect CR newline entity, replaced with LF."), + "Incorrect CR newline entity, replaced with LF.", "illegal-windows-1252-entity": - _("Entity used with illegal number (windows-1252 reference)."), + "Entity used with illegal number (windows-1252 reference).", "cant-convert-numeric-entity": - _("Numeric entity couldn't be converted to character " - "(codepoint U+%(charAsInt)08x)."), + "Numeric entity couldn't be converted to character " + "(codepoint U+%(charAsInt)08x).", "illegal-codepoint-for-numeric-entity": - _("Numeric entity represents an illegal codepoint: " - "U+%(charAsInt)08x."), + "Numeric entity represents an illegal codepoint: " + "U+%(charAsInt)08x.", "numeric-entity-without-semicolon": - _("Numeric entity didn't end with ';'."), + "Numeric entity didn't end with ';'.", "expected-numeric-entity-but-got-eof": - _("Numeric entity expected. Got end of file instead."), + "Numeric entity expected. Got end of file instead.", "expected-numeric-entity": - _("Numeric entity expected but none found."), + "Numeric entity expected but none found.", "named-entity-without-semicolon": - _("Named entity didn't end with ';'."), + "Named entity didn't end with ';'.", "expected-named-entity": - _("Named entity expected. Got none."), + "Named entity expected. Got none.", "attributes-in-end-tag": - _("End tag contains unexpected attributes."), + "End tag contains unexpected attributes.", 'self-closing-flag-on-end-tag': - _("End tag contains unexpected self-closing flag."), + "End tag contains unexpected self-closing flag.", "expected-tag-name-but-got-right-bracket": - _("Expected tag name. Got '>' instead."), + "Expected tag name. Got '>' instead.", "expected-tag-name-but-got-question-mark": - _("Expected tag name. Got '?' instead. (HTML doesn't " - "support processing instructions.)"), + "Expected tag name. Got '?' instead. (HTML doesn't " + "support processing instructions.)", "expected-tag-name": - _("Expected tag name. 
Got something else instead"), + "Expected tag name. Got something else instead", "expected-closing-tag-but-got-right-bracket": - _("Expected closing tag. Got '>' instead. Ignoring ''."), + "Expected closing tag. Got '>' instead. Ignoring ''.", "expected-closing-tag-but-got-eof": - _("Expected closing tag. Unexpected end of file."), + "Expected closing tag. Unexpected end of file.", "expected-closing-tag-but-got-char": - _("Expected closing tag. Unexpected character '%(data)s' found."), + "Expected closing tag. Unexpected character '%(data)s' found.", "eof-in-tag-name": - _("Unexpected end of file in the tag name."), + "Unexpected end of file in the tag name.", "expected-attribute-name-but-got-eof": - _("Unexpected end of file. Expected attribute name instead."), + "Unexpected end of file. Expected attribute name instead.", "eof-in-attribute-name": - _("Unexpected end of file in attribute name."), + "Unexpected end of file in attribute name.", "invalid-character-in-attribute-name": - _("Invalid character in attribute name"), + "Invalid character in attribute name", "duplicate-attribute": - _("Dropped duplicate attribute on tag."), + "Dropped duplicate attribute on tag.", "expected-end-of-tag-name-but-got-eof": - _("Unexpected end of file. Expected = or end of tag."), + "Unexpected end of file. Expected = or end of tag.", "expected-attribute-value-but-got-eof": - _("Unexpected end of file. Expected attribute value."), + "Unexpected end of file. Expected attribute value.", "expected-attribute-value-but-got-right-bracket": - _("Expected attribute value. Got '>' instead."), + "Expected attribute value. Got '>' instead.", 'equals-in-unquoted-attribute-value': - _("Unexpected = in unquoted attribute"), + "Unexpected = in unquoted attribute", 'unexpected-character-in-unquoted-attribute-value': - _("Unexpected character in unquoted attribute"), + "Unexpected character in unquoted attribute", "invalid-character-after-attribute-name": - _("Unexpected character after attribute name."), + "Unexpected character after attribute name.", "unexpected-character-after-attribute-value": - _("Unexpected character after attribute value."), + "Unexpected character after attribute value.", "eof-in-attribute-value-double-quote": - _("Unexpected end of file in attribute value (\")."), + "Unexpected end of file in attribute value (\").", "eof-in-attribute-value-single-quote": - _("Unexpected end of file in attribute value (')."), + "Unexpected end of file in attribute value (').", "eof-in-attribute-value-no-quotes": - _("Unexpected end of file in attribute value."), + "Unexpected end of file in attribute value.", "unexpected-EOF-after-solidus-in-tag": - _("Unexpected end of file in tag. Expected >"), + "Unexpected end of file in tag. Expected >", "unexpected-character-after-solidus-in-tag": - _("Unexpected character after / in tag. Expected >"), + "Unexpected character after / in tag. Expected >", "expected-dashes-or-doctype": - _("Expected '--' or 'DOCTYPE'. Not found."), + "Expected '--' or 'DOCTYPE'. Not found.", "unexpected-bang-after-double-dash-in-comment": - _("Unexpected ! after -- in comment"), + "Unexpected ! 
after -- in comment", "unexpected-space-after-double-dash-in-comment": - _("Unexpected space after -- in comment"), + "Unexpected space after -- in comment", "incorrect-comment": - _("Incorrect comment."), + "Incorrect comment.", "eof-in-comment": - _("Unexpected end of file in comment."), + "Unexpected end of file in comment.", "eof-in-comment-end-dash": - _("Unexpected end of file in comment (-)"), + "Unexpected end of file in comment (-)", "unexpected-dash-after-double-dash-in-comment": - _("Unexpected '-' after '--' found in comment."), + "Unexpected '-' after '--' found in comment.", "eof-in-comment-double-dash": - _("Unexpected end of file in comment (--)."), + "Unexpected end of file in comment (--).", "eof-in-comment-end-space-state": - _("Unexpected end of file in comment."), + "Unexpected end of file in comment.", "eof-in-comment-end-bang-state": - _("Unexpected end of file in comment."), + "Unexpected end of file in comment.", "unexpected-char-in-comment": - _("Unexpected character in comment found."), + "Unexpected character in comment found.", "need-space-after-doctype": - _("No space after literal string 'DOCTYPE'."), + "No space after literal string 'DOCTYPE'.", "expected-doctype-name-but-got-right-bracket": - _("Unexpected > character. Expected DOCTYPE name."), + "Unexpected > character. Expected DOCTYPE name.", "expected-doctype-name-but-got-eof": - _("Unexpected end of file. Expected DOCTYPE name."), + "Unexpected end of file. Expected DOCTYPE name.", "eof-in-doctype-name": - _("Unexpected end of file in DOCTYPE name."), + "Unexpected end of file in DOCTYPE name.", "eof-in-doctype": - _("Unexpected end of file in DOCTYPE."), + "Unexpected end of file in DOCTYPE.", "expected-space-or-right-bracket-in-doctype": - _("Expected space or '>'. Got '%(data)s'"), + "Expected space or '>'. Got '%(data)s'", "unexpected-end-of-doctype": - _("Unexpected end of DOCTYPE."), + "Unexpected end of DOCTYPE.", "unexpected-char-in-doctype": - _("Unexpected character in DOCTYPE."), + "Unexpected character in DOCTYPE.", "eof-in-innerhtml": - _("XXX innerHTML EOF"), + "XXX innerHTML EOF", "unexpected-doctype": - _("Unexpected DOCTYPE. Ignored."), + "Unexpected DOCTYPE. Ignored.", "non-html-root": - _("html needs to be the first start tag."), + "html needs to be the first start tag.", "expected-doctype-but-got-eof": - _("Unexpected End of file. Expected DOCTYPE."), + "Unexpected End of file. Expected DOCTYPE.", "unknown-doctype": - _("Erroneous DOCTYPE."), + "Erroneous DOCTYPE.", "expected-doctype-but-got-chars": - _("Unexpected non-space characters. Expected DOCTYPE."), + "Unexpected non-space characters. Expected DOCTYPE.", "expected-doctype-but-got-start-tag": - _("Unexpected start tag (%(name)s). Expected DOCTYPE."), + "Unexpected start tag (%(name)s). Expected DOCTYPE.", "expected-doctype-but-got-end-tag": - _("Unexpected end tag (%(name)s). Expected DOCTYPE."), + "Unexpected end tag (%(name)s). Expected DOCTYPE.", "end-tag-after-implied-root": - _("Unexpected end tag (%(name)s) after the (implied) root element."), + "Unexpected end tag (%(name)s) after the (implied) root element.", "expected-named-closing-tag-but-got-eof": - _("Unexpected end of file. Expected end tag (%(name)s)."), + "Unexpected end of file. Expected end tag (%(name)s).", "two-heads-are-not-better-than-one": - _("Unexpected start tag head in existing head. Ignored."), + "Unexpected start tag head in existing head. Ignored.", "unexpected-end-tag": - _("Unexpected end tag (%(name)s). 
Ignored."), + "Unexpected end tag (%(name)s). Ignored.", "unexpected-start-tag-out-of-my-head": - _("Unexpected start tag (%(name)s) that can be in head. Moved."), + "Unexpected start tag (%(name)s) that can be in head. Moved.", "unexpected-start-tag": - _("Unexpected start tag (%(name)s)."), + "Unexpected start tag (%(name)s).", "missing-end-tag": - _("Missing end tag (%(name)s)."), + "Missing end tag (%(name)s).", "missing-end-tags": - _("Missing end tags (%(name)s)."), + "Missing end tags (%(name)s).", "unexpected-start-tag-implies-end-tag": - _("Unexpected start tag (%(startName)s) " - "implies end tag (%(endName)s)."), + "Unexpected start tag (%(startName)s) " + "implies end tag (%(endName)s).", "unexpected-start-tag-treated-as": - _("Unexpected start tag (%(originalName)s). Treated as %(newName)s."), + "Unexpected start tag (%(originalName)s). Treated as %(newName)s.", "deprecated-tag": - _("Unexpected start tag %(name)s. Don't use it!"), + "Unexpected start tag %(name)s. Don't use it!", "unexpected-start-tag-ignored": - _("Unexpected start tag %(name)s. Ignored."), + "Unexpected start tag %(name)s. Ignored.", "expected-one-end-tag-but-got-another": - _("Unexpected end tag (%(gotName)s). " - "Missing end tag (%(expectedName)s)."), + "Unexpected end tag (%(gotName)s). " + "Missing end tag (%(expectedName)s).", "end-tag-too-early": - _("End tag (%(name)s) seen too early. Expected other end tag."), + "End tag (%(name)s) seen too early. Expected other end tag.", "end-tag-too-early-named": - _("Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s)."), + "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).", "end-tag-too-early-ignored": - _("End tag (%(name)s) seen too early. Ignored."), + "End tag (%(name)s) seen too early. Ignored.", "adoption-agency-1.1": - _("End tag (%(name)s) violates step 1, " - "paragraph 1 of the adoption agency algorithm."), + "End tag (%(name)s) violates step 1, " + "paragraph 1 of the adoption agency algorithm.", "adoption-agency-1.2": - _("End tag (%(name)s) violates step 1, " - "paragraph 2 of the adoption agency algorithm."), + "End tag (%(name)s) violates step 1, " + "paragraph 2 of the adoption agency algorithm.", "adoption-agency-1.3": - _("End tag (%(name)s) violates step 1, " - "paragraph 3 of the adoption agency algorithm."), + "End tag (%(name)s) violates step 1, " + "paragraph 3 of the adoption agency algorithm.", "adoption-agency-4.4": - _("End tag (%(name)s) violates step 4, " - "paragraph 4 of the adoption agency algorithm."), + "End tag (%(name)s) violates step 4, " + "paragraph 4 of the adoption agency algorithm.", "unexpected-end-tag-treated-as": - _("Unexpected end tag (%(originalName)s). Treated as %(newName)s."), + "Unexpected end tag (%(originalName)s). 
Treated as %(newName)s.", "no-end-tag": - _("This element (%(name)s) has no end tag."), + "This element (%(name)s) has no end tag.", "unexpected-implied-end-tag-in-table": - _("Unexpected implied end tag (%(name)s) in the table phase."), + "Unexpected implied end tag (%(name)s) in the table phase.", "unexpected-implied-end-tag-in-table-body": - _("Unexpected implied end tag (%(name)s) in the table body phase."), + "Unexpected implied end tag (%(name)s) in the table body phase.", "unexpected-char-implies-table-voodoo": - _("Unexpected non-space characters in " - "table context caused voodoo mode."), + "Unexpected non-space characters in " + "table context caused voodoo mode.", "unexpected-hidden-input-in-table": - _("Unexpected input with type hidden in table context."), + "Unexpected input with type hidden in table context.", "unexpected-form-in-table": - _("Unexpected form in table context."), + "Unexpected form in table context.", "unexpected-start-tag-implies-table-voodoo": - _("Unexpected start tag (%(name)s) in " - "table context caused voodoo mode."), + "Unexpected start tag (%(name)s) in " + "table context caused voodoo mode.", "unexpected-end-tag-implies-table-voodoo": - _("Unexpected end tag (%(name)s) in " - "table context caused voodoo mode."), + "Unexpected end tag (%(name)s) in " + "table context caused voodoo mode.", "unexpected-cell-in-table-body": - _("Unexpected table cell start tag (%(name)s) " - "in the table body phase."), + "Unexpected table cell start tag (%(name)s) " + "in the table body phase.", "unexpected-cell-end-tag": - _("Got table cell end tag (%(name)s) " - "while required end tags are missing."), + "Got table cell end tag (%(name)s) " + "while required end tags are missing.", "unexpected-end-tag-in-table-body": - _("Unexpected end tag (%(name)s) in the table body phase. Ignored."), + "Unexpected end tag (%(name)s) in the table body phase. Ignored.", "unexpected-implied-end-tag-in-table-row": - _("Unexpected implied end tag (%(name)s) in the table row phase."), + "Unexpected implied end tag (%(name)s) in the table row phase.", "unexpected-end-tag-in-table-row": - _("Unexpected end tag (%(name)s) in the table row phase. Ignored."), + "Unexpected end tag (%(name)s) in the table row phase. Ignored.", "unexpected-select-in-select": - _("Unexpected select start tag in the select phase " - "treated as select end tag."), + "Unexpected select start tag in the select phase " + "treated as select end tag.", "unexpected-input-in-select": - _("Unexpected input start tag in the select phase."), + "Unexpected input start tag in the select phase.", "unexpected-start-tag-in-select": - _("Unexpected start tag token (%(name)s in the select phase. " - "Ignored."), + "Unexpected start tag token (%(name)s in the select phase. " + "Ignored.", "unexpected-end-tag-in-select": - _("Unexpected end tag (%(name)s) in the select phase. Ignored."), + "Unexpected end tag (%(name)s) in the select phase. 
Ignored.", "unexpected-table-element-start-tag-in-select-in-table": - _("Unexpected table element start tag (%(name)s) in the select in table phase."), + "Unexpected table element start tag (%(name)s) in the select in table phase.", "unexpected-table-element-end-tag-in-select-in-table": - _("Unexpected table element end tag (%(name)s) in the select in table phase."), + "Unexpected table element end tag (%(name)s) in the select in table phase.", "unexpected-char-after-body": - _("Unexpected non-space characters in the after body phase."), + "Unexpected non-space characters in the after body phase.", "unexpected-start-tag-after-body": - _("Unexpected start tag token (%(name)s)" - " in the after body phase."), + "Unexpected start tag token (%(name)s)" + " in the after body phase.", "unexpected-end-tag-after-body": - _("Unexpected end tag token (%(name)s)" - " in the after body phase."), + "Unexpected end tag token (%(name)s)" + " in the after body phase.", "unexpected-char-in-frameset": - _("Unexpected characters in the frameset phase. Characters ignored."), + "Unexpected characters in the frameset phase. Characters ignored.", "unexpected-start-tag-in-frameset": - _("Unexpected start tag token (%(name)s)" - " in the frameset phase. Ignored."), + "Unexpected start tag token (%(name)s)" + " in the frameset phase. Ignored.", "unexpected-frameset-in-frameset-innerhtml": - _("Unexpected end tag token (frameset) " - "in the frameset phase (innerHTML)."), + "Unexpected end tag token (frameset) " + "in the frameset phase (innerHTML).", "unexpected-end-tag-in-frameset": - _("Unexpected end tag token (%(name)s)" - " in the frameset phase. Ignored."), + "Unexpected end tag token (%(name)s)" + " in the frameset phase. Ignored.", "unexpected-char-after-frameset": - _("Unexpected non-space characters in the " - "after frameset phase. Ignored."), + "Unexpected non-space characters in the " + "after frameset phase. Ignored.", "unexpected-start-tag-after-frameset": - _("Unexpected start tag (%(name)s)" - " in the after frameset phase. Ignored."), + "Unexpected start tag (%(name)s)" + " in the after frameset phase. Ignored.", "unexpected-end-tag-after-frameset": - _("Unexpected end tag (%(name)s)" - " in the after frameset phase. Ignored."), + "Unexpected end tag (%(name)s)" + " in the after frameset phase. Ignored.", "unexpected-end-tag-after-body-innerhtml": - _("Unexpected end tag after body(innerHtml)"), + "Unexpected end tag after body(innerHtml)", "expected-eof-but-got-char": - _("Unexpected non-space characters. Expected end of file."), + "Unexpected non-space characters. Expected end of file.", "expected-eof-but-got-start-tag": - _("Unexpected start tag (%(name)s)" - ". Expected end of file."), + "Unexpected start tag (%(name)s)" + ". Expected end of file.", "expected-eof-but-got-end-tag": - _("Unexpected end tag (%(name)s)" - ". Expected end of file."), + "Unexpected end tag (%(name)s)" + ". Expected end of file.", "eof-in-table": - _("Unexpected end of file. Expected table content."), + "Unexpected end of file. Expected table content.", "eof-in-select": - _("Unexpected end of file. Expected select content."), + "Unexpected end of file. Expected select content.", "eof-in-frameset": - _("Unexpected end of file. Expected frameset content."), + "Unexpected end of file. Expected frameset content.", "eof-in-script-in-script": - _("Unexpected end of file. Expected script content."), + "Unexpected end of file. Expected script content.", "eof-in-foreign-lands": - _("Unexpected end of file. 
Expected foreign content"), + "Unexpected end of file. Expected foreign content", "non-void-element-with-trailing-solidus": - _("Trailing solidus not allowed on element %(name)s"), + "Trailing solidus not allowed on element %(name)s", "unexpected-html-element-in-foreign-content": - _("Element %(name)s not allowed in a non-html context"), + "Element %(name)s not allowed in a non-html context", "unexpected-end-tag-before-html": - _("Unexpected end tag (%(name)s) before html."), + "Unexpected end tag (%(name)s) before html.", "XXX-undefined-error": - _("Undefined error (this sucks and should be fixed)"), + "Undefined error (this sucks and should be fixed)", } namespaces = { @@ -298,7 +296,7 @@ "xmlns": "http://www.w3.org/2000/xmlns/" } -scopingElements = frozenset(( +scopingElements = frozenset([ (namespaces["html"], "applet"), (namespaces["html"], "caption"), (namespaces["html"], "html"), @@ -316,9 +314,9 @@ (namespaces["svg"], "foreignObject"), (namespaces["svg"], "desc"), (namespaces["svg"], "title"), -)) +]) -formattingElements = frozenset(( +formattingElements = frozenset([ (namespaces["html"], "a"), (namespaces["html"], "b"), (namespaces["html"], "big"), @@ -333,9 +331,9 @@ (namespaces["html"], "strong"), (namespaces["html"], "tt"), (namespaces["html"], "u") -)) +]) -specialElements = frozenset(( +specialElements = frozenset([ (namespaces["html"], "address"), (namespaces["html"], "applet"), (namespaces["html"], "area"), @@ -416,22 +414,22 @@ (namespaces["html"], "wbr"), (namespaces["html"], "xmp"), (namespaces["svg"], "foreignObject") -)) +]) -htmlIntegrationPointElements = frozenset(( +htmlIntegrationPointElements = frozenset([ (namespaces["mathml"], "annotaion-xml"), (namespaces["svg"], "foreignObject"), (namespaces["svg"], "desc"), (namespaces["svg"], "title") -)) +]) -mathmlTextIntegrationPointElements = frozenset(( +mathmlTextIntegrationPointElements = frozenset([ (namespaces["mathml"], "mi"), (namespaces["mathml"], "mo"), (namespaces["mathml"], "mn"), (namespaces["mathml"], "ms"), (namespaces["mathml"], "mtext") -)) +]) adjustForeignAttributes = { "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]), @@ -451,21 +449,21 @@ unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in adjustForeignAttributes.items()]) -spaceCharacters = frozenset(( +spaceCharacters = frozenset([ "\t", "\n", "\u000C", " ", "\r" -)) +]) -tableInsertModeElements = frozenset(( +tableInsertModeElements = frozenset([ "table", "tbody", "tfoot", "thead", "tr" -)) +]) asciiLowercase = frozenset(string.ascii_lowercase) asciiUppercase = frozenset(string.ascii_uppercase) @@ -486,7 +484,7 @@ "h6" ) -voidElements = frozenset(( +voidElements = frozenset([ "base", "command", "event-source", @@ -502,11 +500,11 @@ "input", "source", "track" -)) +]) -cdataElements = frozenset(('title', 'textarea')) +cdataElements = frozenset(['title', 'textarea']) -rcdataElements = frozenset(( +rcdataElements = frozenset([ 'style', 'script', 'xmp', @@ -514,27 +512,27 @@ 'noembed', 'noframes', 'noscript' -)) +]) booleanAttributes = { - "": frozenset(("irrelevant",)), - "style": frozenset(("scoped",)), - "img": frozenset(("ismap",)), - "audio": frozenset(("autoplay", "controls")), - "video": frozenset(("autoplay", "controls")), - "script": frozenset(("defer", "async")), - "details": frozenset(("open",)), - "datagrid": frozenset(("multiple", "disabled")), - "command": frozenset(("hidden", "disabled", "checked", "default")), - "hr": frozenset(("noshade")), - "menu": frozenset(("autosubmit",)), - 
"fieldset": frozenset(("disabled", "readonly")), - "option": frozenset(("disabled", "readonly", "selected")), - "optgroup": frozenset(("disabled", "readonly")), - "button": frozenset(("disabled", "autofocus")), - "input": frozenset(("disabled", "readonly", "required", "autofocus", "checked", "ismap")), - "select": frozenset(("disabled", "readonly", "autofocus", "multiple")), - "output": frozenset(("disabled", "readonly")), + "": frozenset(["irrelevant"]), + "style": frozenset(["scoped"]), + "img": frozenset(["ismap"]), + "audio": frozenset(["autoplay", "controls"]), + "video": frozenset(["autoplay", "controls"]), + "script": frozenset(["defer", "async"]), + "details": frozenset(["open"]), + "datagrid": frozenset(["multiple", "disabled"]), + "command": frozenset(["hidden", "disabled", "checked", "default"]), + "hr": frozenset(["noshade"]), + "menu": frozenset(["autosubmit"]), + "fieldset": frozenset(["disabled", "readonly"]), + "option": frozenset(["disabled", "readonly", "selected"]), + "optgroup": frozenset(["disabled", "readonly"]), + "button": frozenset(["disabled", "autofocus"]), + "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]), + "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]), + "output": frozenset(["disabled", "readonly"]), } # entitiesWindows1252 has to be _ordered_ and needs to have an index. It @@ -574,7 +572,7 @@ 376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS ) -xmlEntities = frozenset(('lt;', 'gt;', 'amp;', 'apos;', 'quot;')) +xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;']) entities = { "AElig": "\xc6", @@ -3088,8 +3086,8 @@ "ParseError": 7 } -tagTokenTypes = frozenset((tokenTypes["StartTag"], tokenTypes["EndTag"], - tokenTypes["EmptyTag"])) +tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"], + tokenTypes["EmptyTag"]]) prefixes = dict([(v, k) for k, v in namespaces.items()]) diff --git a/lib/html5lib/filters/lint.py b/lib/html5lib/filters/lint.py index 7cc99a4ba7..8884696dc5 100644 --- a/lib/html5lib/filters/lint.py +++ b/lib/html5lib/filters/lint.py @@ -1,8 +1,5 @@ from __future__ import absolute_import, division, unicode_literals -from gettext import gettext -_ = gettext - from . 
import _base from ..constants import cdataElements, rcdataElements, voidElements @@ -23,24 +20,24 @@ def __iter__(self): if type in ("StartTag", "EmptyTag"): name = token["name"] if contentModelFlag != "PCDATA": - raise LintError(_("StartTag not in PCDATA content model flag: %(tag)s") % {"tag": name}) + raise LintError("StartTag not in PCDATA content model flag: %(tag)s" % {"tag": name}) if not isinstance(name, str): - raise LintError(_("Tag name is not a string: %(tag)r") % {"tag": name}) + raise LintError("Tag name is not a string: %(tag)r" % {"tag": name}) if not name: - raise LintError(_("Empty tag name")) + raise LintError("Empty tag name") if type == "StartTag" and name in voidElements: - raise LintError(_("Void element reported as StartTag token: %(tag)s") % {"tag": name}) + raise LintError("Void element reported as StartTag token: %(tag)s" % {"tag": name}) elif type == "EmptyTag" and name not in voidElements: - raise LintError(_("Non-void element reported as EmptyTag token: %(tag)s") % {"tag": token["name"]}) + raise LintError("Non-void element reported as EmptyTag token: %(tag)s" % {"tag": token["name"]}) if type == "StartTag": open_elements.append(name) for name, value in token["data"]: if not isinstance(name, str): - raise LintError(_("Attribute name is not a string: %(name)r") % {"name": name}) + raise LintError("Attribute name is not a string: %(name)r" % {"name": name}) if not name: - raise LintError(_("Empty attribute name")) + raise LintError("Empty attribute name") if not isinstance(value, str): - raise LintError(_("Attribute value is not a string: %(value)r") % {"value": value}) + raise LintError("Attribute value is not a string: %(value)r" % {"value": value}) if name in cdataElements: contentModelFlag = "CDATA" elif name in rcdataElements: @@ -51,43 +48,43 @@ def __iter__(self): elif type == "EndTag": name = token["name"] if not isinstance(name, str): - raise LintError(_("Tag name is not a string: %(tag)r") % {"tag": name}) + raise LintError("Tag name is not a string: %(tag)r" % {"tag": name}) if not name: - raise LintError(_("Empty tag name")) + raise LintError("Empty tag name") if name in voidElements: - raise LintError(_("Void element reported as EndTag token: %(tag)s") % {"tag": name}) + raise LintError("Void element reported as EndTag token: %(tag)s" % {"tag": name}) start_name = open_elements.pop() if start_name != name: - raise LintError(_("EndTag (%(end)s) does not match StartTag (%(start)s)") % {"end": name, "start": start_name}) + raise LintError("EndTag (%(end)s) does not match StartTag (%(start)s)" % {"end": name, "start": start_name}) contentModelFlag = "PCDATA" elif type == "Comment": if contentModelFlag != "PCDATA": - raise LintError(_("Comment not in PCDATA content model flag")) + raise LintError("Comment not in PCDATA content model flag") elif type in ("Characters", "SpaceCharacters"): data = token["data"] if not isinstance(data, str): - raise LintError(_("Attribute name is not a string: %(name)r") % {"name": data}) + raise LintError("Attribute name is not a string: %(name)r" % {"name": data}) if not data: - raise LintError(_("%(type)s token with empty data") % {"type": type}) + raise LintError("%(type)s token with empty data" % {"type": type}) if type == "SpaceCharacters": data = data.strip(spaceCharacters) if data: - raise LintError(_("Non-space character(s) found in SpaceCharacters token: %(token)r") % {"token": data}) + raise LintError("Non-space character(s) found in SpaceCharacters token: %(token)r" % {"token": data}) elif type == "Doctype": name = 
token["name"] if contentModelFlag != "PCDATA": - raise LintError(_("Doctype not in PCDATA content model flag: %(name)s") % {"name": name}) + raise LintError("Doctype not in PCDATA content model flag: %(name)s" % {"name": name}) if not isinstance(name, str): - raise LintError(_("Tag name is not a string: %(tag)r") % {"tag": name}) + raise LintError("Tag name is not a string: %(tag)r" % {"tag": name}) # XXX: what to do with token["data"] ? elif type in ("ParseError", "SerializeError"): pass else: - raise LintError(_("Unknown token type: %(type)s") % {"type": type}) + raise LintError("Unknown token type: %(type)s" % {"type": type}) yield token diff --git a/lib/html5lib/html5parser.py b/lib/html5lib/html5parser.py index b0f14f3935..12aa6a35e1 100644 --- a/lib/html5lib/html5parser.py +++ b/lib/html5lib/html5parser.py @@ -18,6 +18,7 @@ from .constants import tokenTypes, ReparseException, namespaces from .constants import htmlIntegrationPointElements, mathmlTextIntegrationPointElements from .constants import adjustForeignAttributes as adjustForeignAttributesMap +from .constants import E def parse(doc, treebuilder="etree", encoding=None, @@ -129,6 +130,17 @@ def reset(self): self.framesetOK = True + @property + def documentEncoding(self): + """The name of the character encoding + that was used to decode the input stream, + or :obj:`None` if that is not determined yet. + + """ + if not hasattr(self, 'tokenizer'): + return None + return self.tokenizer.stream.charEncoding[0] + def isHTMLIntegrationPoint(self, element): if (element.name == "annotation-xml" and element.namespace == namespaces["mathml"]): @@ -245,7 +257,7 @@ def parseError(self, errorcode="XXX-undefined-error", datavars={}): # XXX The idea is to make errorcode mandatory. self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) if self.strict: - raise ParseError + raise ParseError(E[errorcode] % datavars) def normalizeToken(self, token): """ HTML5 specific normalizations to the token stream """ @@ -868,7 +880,7 @@ def __init__(self, parser, tree): self.startTagHandler = utils.MethodDispatcher([ ("html", self.startTagHtml), (("base", "basefont", "bgsound", "command", "link", "meta", - "noframes", "script", "style", "title"), + "script", "style", "title"), self.startTagProcessInHead), ("body", self.startTagBody), ("frameset", self.startTagFrameset), @@ -1205,8 +1217,7 @@ def startTagIsIndex(self, token): attributes["name"] = "isindex" self.processStartTag(impliedTagToken("input", "StartTag", attributes=attributes, - selfClosing= - token["selfClosing"])) + selfClosing=token["selfClosing"])) self.processEndTag(impliedTagToken("label")) self.processStartTag(impliedTagToken("hr", "StartTag")) self.processEndTag(impliedTagToken("form")) diff --git a/lib/html5lib/inputstream.py b/lib/html5lib/inputstream.py index 9e03b9313d..7020aa60f6 100644 --- a/lib/html5lib/inputstream.py +++ b/lib/html5lib/inputstream.py @@ -28,7 +28,18 @@ class BufferedIOBase(object): asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase]) spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"]) -invalid_unicode_re = 
re.compile("[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uD800-\uDFFF\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]") + +invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" + +if utils.supports_lone_surrogates: + # Use one extra step of indirection and create surrogates with + # unichr. Not using this indirection would introduce an illegal + # unicode literal on platforms not supporting such lone + # surrogates. + invalid_unicode_re = re.compile(invalid_unicode_no_surrogate + + eval('"\\uD800-\\uDFFF"')) +else: + invalid_unicode_re = re.compile(invalid_unicode_no_surrogate) non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, @@ -164,13 +175,18 @@ def __init__(self, source): """ - # Craziness - if len("\U0010FFFF") == 1: + if not utils.supports_lone_surrogates: + # Such platforms will have already checked for such + # surrogate errors, so no need to do this checking. + self.reportCharacterErrors = None + self.replaceCharactersRegexp = None + elif len("\U0010FFFF") == 1: self.reportCharacterErrors = self.characterErrorsUCS4 - self.replaceCharactersRegexp = re.compile("[\uD800-\uDFFF]") + self.replaceCharactersRegexp = re.compile(eval('"[\\uD800-\\uDFFF]"')) else: self.reportCharacterErrors = self.characterErrorsUCS2 - self.replaceCharactersRegexp = re.compile("([\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?/ + (?P[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+) + # Match any character set and encoding + (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?) + |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?) 
+ # Assume the rest is data + ,.* + $ + ''', + re.VERBOSE) + + class HTMLSanitizerMixin(object): """ sanitization of XHTML+MathML+SVG and of inline style attributes.""" @@ -100,8 +115,8 @@ class HTMLSanitizerMixin(object): 'xml:base', 'xml:lang', 'xml:space', 'xmlns', 'xmlns:xlink', 'y', 'y1', 'y2', 'zoomAndPan'] - attr_val_is_uri = ['href', 'src', 'cite', 'action', 'longdesc', 'poster', - 'xlink:href', 'xml:base'] + attr_val_is_uri = ['href', 'src', 'cite', 'action', 'longdesc', 'poster', 'background', 'datasrc', + 'dynsrc', 'lowsrc', 'ping', 'poster', 'xlink:href', 'xml:base'] svg_attr_val_allows_ref = ['clip-path', 'color-profile', 'cursor', 'fill', 'filter', 'marker', 'marker-start', 'marker-mid', 'marker-end', @@ -138,7 +153,9 @@ class HTMLSanitizerMixin(object): acceptable_protocols = ['ed2k', 'ftp', 'http', 'https', 'irc', 'mailto', 'news', 'gopher', 'nntp', 'telnet', 'webcal', 'xmpp', 'callto', 'feed', 'urn', 'aim', 'rsync', 'tag', - 'ssh', 'sftp', 'rtsp', 'afs'] + 'ssh', 'sftp', 'rtsp', 'afs', 'data'] + + acceptable_content_types = ['image/png', 'image/jpeg', 'image/gif', 'image/webp', 'image/bmp', 'text/plain'] # subclasses may define their own versions of these constants allowed_elements = acceptable_elements + mathml_elements + svg_elements @@ -147,6 +164,7 @@ class HTMLSanitizerMixin(object): allowed_css_keywords = acceptable_css_keywords allowed_svg_properties = acceptable_svg_properties allowed_protocols = acceptable_protocols + allowed_content_types = acceptable_content_types # Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and # stripping out all # attributes not in ALLOWED_ATTRIBUTES. Style @@ -189,10 +207,21 @@ def allowed_token(self, token, token_type): unescape(attrs[attr])).lower() # remove replacement characters from unescaped characters val_unescaped = val_unescaped.replace("\ufffd", "") - if (re.match("^[a-z0-9][-+.a-z0-9]*:", val_unescaped) and - (val_unescaped.split(':')[0] not in - self.allowed_protocols)): + try: + uri = urlparse.urlparse(val_unescaped) + except ValueError: + uri = None del attrs[attr] + if uri and uri.scheme: + if uri.scheme not in self.allowed_protocols: + del attrs[attr] + if uri.scheme == 'data': + m = content_type_rgx.match(uri.path) + if not m: + del attrs[attr] + elif m.group('content_type') not in self.allowed_content_types: + del attrs[attr] + for attr in self.svg_attr_val_allows_ref: if attr in attrs: attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)', @@ -245,7 +274,7 @@ def sanitize_css(self, style): elif prop.split('-')[0].lower() in ['background', 'border', 'margin', 'padding']: for keyword in value.split(): - if not keyword in self.acceptable_css_keywords and \ + if keyword not in self.acceptable_css_keywords and \ not re.match("^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): break else: diff --git a/lib/html5lib/serializer/htmlserializer.py b/lib/html5lib/serializer/htmlserializer.py index 412a5a2209..be4d634411 100644 --- a/lib/html5lib/serializer/htmlserializer.py +++ b/lib/html5lib/serializer/htmlserializer.py @@ -1,9 +1,6 @@ from __future__ import absolute_import, division, unicode_literals from six import text_type -import gettext -_ = gettext.gettext - try: from functools import reduce except ImportError: @@ -35,7 +32,7 @@ v = utils.surrogatePairToCodepoint(v) else: v = ord(v) - if not v in encode_entity_map or k.islower(): + if v not in encode_entity_map or k.islower(): # prefer < over < and similarly for &, >, etc. 
encode_entity_map[v] = k @@ -208,7 +205,7 @@ def serialize(self, treewalker, encoding=None): if token["systemId"]: if token["systemId"].find('"') >= 0: if token["systemId"].find("'") >= 0: - self.serializeError(_("System identifer contains both single and double quote characters")) + self.serializeError("System identifer contains both single and double quote characters") quote_char = "'" else: quote_char = '"' @@ -220,7 +217,7 @@ def serialize(self, treewalker, encoding=None): elif type in ("Characters", "SpaceCharacters"): if type == "SpaceCharacters" or in_cdata: if in_cdata and token["data"].find("= 0: - self.serializeError(_("Unexpected " % name) elif type == "Comment": data = token["data"] if data.find("--") >= 0: - self.serializeError(_("Comment contains --")) + self.serializeError("Comment contains --") yield self.encodeStrict("" % token["data"]) elif type == "Entity": name = token["name"] key = name + ";" - if not key in entities: - self.serializeError(_("Entity %s not recognized" % name)) + if key not in entities: + self.serializeError("Entity %s not recognized" % name) if self.resolve_entities and key not in xmlEntities: data = entities[key] else: diff --git a/lib/html5lib/treebuilders/dom.py b/lib/html5lib/treebuilders/dom.py index 61e5ed79ed..234233b793 100644 --- a/lib/html5lib/treebuilders/dom.py +++ b/lib/html5lib/treebuilders/dom.py @@ -158,7 +158,7 @@ def insertText(self, data, parent=None): else: # HACK: allow text nodes as children of the document node if hasattr(self.dom, '_child_node_types'): - if not Node.TEXT_NODE in self.dom._child_node_types: + if Node.TEXT_NODE not in self.dom._child_node_types: self.dom._child_node_types = list(self.dom._child_node_types) self.dom._child_node_types.append(Node.TEXT_NODE) self.dom.appendChild(self.dom.createTextNode(data)) diff --git a/lib/html5lib/treewalkers/__init__.py b/lib/html5lib/treewalkers/__init__.py index 18124e75f3..20b91b114a 100644 --- a/lib/html5lib/treewalkers/__init__.py +++ b/lib/html5lib/treewalkers/__init__.py @@ -10,8 +10,12 @@ from __future__ import absolute_import, division, unicode_literals +__all__ = ["getTreeWalker", "pprint", "dom", "etree", "genshistream", "lxmletree", + "pulldom"] + import sys +from .. 
import constants from ..utils import default_etree treeWalkerCache = {} @@ -55,3 +59,89 @@ def getTreeWalker(treeType, implementation=None, **kwargs): # XXX: NEVER cache here, caching is done in the etree submodule return etree.getETreeModule(implementation, **kwargs).TreeWalker return treeWalkerCache.get(treeType) + + +def concatenateCharacterTokens(tokens): + pendingCharacters = [] + for token in tokens: + type = token["type"] + if type in ("Characters", "SpaceCharacters"): + pendingCharacters.append(token["data"]) + else: + if pendingCharacters: + yield {"type": "Characters", "data": "".join(pendingCharacters)} + pendingCharacters = [] + yield token + if pendingCharacters: + yield {"type": "Characters", "data": "".join(pendingCharacters)} + + +def pprint(walker): + """Pretty printer for tree walkers""" + output = [] + indent = 0 + for token in concatenateCharacterTokens(walker): + type = token["type"] + if type in ("StartTag", "EmptyTag"): + # tag name + if token["namespace"] and token["namespace"] != constants.namespaces["html"]: + if token["namespace"] in constants.prefixes: + ns = constants.prefixes[token["namespace"]] + else: + ns = token["namespace"] + name = "%s %s" % (ns, token["name"]) + else: + name = token["name"] + output.append("%s<%s>" % (" " * indent, name)) + indent += 2 + # attributes (sorted for consistent ordering) + attrs = token["data"] + for (namespace, localname), value in sorted(attrs.items()): + if namespace: + if namespace in constants.prefixes: + ns = constants.prefixes[namespace] + else: + ns = namespace + name = "%s %s" % (ns, localname) + else: + name = localname + output.append("%s%s=\"%s\"" % (" " * indent, name, value)) + # self-closing + if type == "EmptyTag": + indent -= 2 + + elif type == "EndTag": + indent -= 2 + + elif type == "Comment": + output.append("%s" % (" " * indent, token["data"])) + + elif type == "Doctype": + if token["name"]: + if token["publicId"]: + output.append("""%s""" % + (" " * indent, + token["name"], + token["publicId"], + token["systemId"] if token["systemId"] else "")) + elif token["systemId"]: + output.append("""%s""" % + (" " * indent, + token["name"], + token["systemId"])) + else: + output.append("%s" % (" " * indent, + token["name"])) + else: + output.append("%s" % (" " * indent,)) + + elif type == "Characters": + output.append("%s\"%s\"" % (" " * indent, token["data"])) + + elif type == "SpaceCharacters": + assert False, "concatenateCharacterTokens should have got rid of all Space tokens" + + else: + raise ValueError("Unknown token type, %s" % type) + + return "\n".join(output) diff --git a/lib/html5lib/treewalkers/_base.py b/lib/html5lib/treewalkers/_base.py index 34252e50c0..4e11cd0202 100644 --- a/lib/html5lib/treewalkers/_base.py +++ b/lib/html5lib/treewalkers/_base.py @@ -1,8 +1,8 @@ from __future__ import absolute_import, division, unicode_literals from six import text_type, string_types -import gettext -_ = gettext.gettext +__all__ = ["DOCUMENT", "DOCTYPE", "TEXT", "ELEMENT", "COMMENT", "ENTITY", "UNKNOWN", + "TreeWalker", "NonRecursiveTreeWalker"] from xml.dom import Node @@ -58,7 +58,7 @@ def emptyTag(self, namespace, name, attrs, hasChildren=False): "namespace": to_text(namespace), "data": attrs} if hasChildren: - yield self.error(_("Void element has children")) + yield self.error("Void element has children") def startTag(self, namespace, name, attrs): assert namespace is None or isinstance(namespace, string_types), type(namespace) @@ -122,7 +122,7 @@ def entity(self, name): return {"type": "Entity", "name": 
text_type(name)} def unknown(self, nodeType): - return self.error(_("Unknown node type: ") + nodeType) + return self.error("Unknown node type: " + nodeType) class NonRecursiveTreeWalker(TreeWalker): diff --git a/lib/html5lib/treewalkers/dom.py b/lib/html5lib/treewalkers/dom.py index a01287a944..ac4dcf31bf 100644 --- a/lib/html5lib/treewalkers/dom.py +++ b/lib/html5lib/treewalkers/dom.py @@ -2,9 +2,6 @@ from xml.dom import Node -import gettext -_ = gettext.gettext - from . import _base diff --git a/lib/html5lib/treewalkers/etree.py b/lib/html5lib/treewalkers/etree.py index fd8a9cc9b5..69840c21e4 100644 --- a/lib/html5lib/treewalkers/etree.py +++ b/lib/html5lib/treewalkers/etree.py @@ -7,12 +7,10 @@ from ordereddict import OrderedDict except ImportError: OrderedDict = dict -import gettext -_ = gettext.gettext import re -from six import text_type +from six import string_types from . import _base from ..utils import moduleFactoryFactory @@ -60,7 +58,7 @@ def getNodeDetails(self, node): return _base.COMMENT, node.text else: - assert type(node.tag) == text_type, type(node.tag) + assert isinstance(node.tag, string_types), type(node.tag) # This is assumed to be an ordinary element match = tag_regexp.match(node.tag) if match: diff --git a/lib/html5lib/treewalkers/lxmletree.py b/lib/html5lib/treewalkers/lxmletree.py index bc934ac05a..90e116d386 100644 --- a/lib/html5lib/treewalkers/lxmletree.py +++ b/lib/html5lib/treewalkers/lxmletree.py @@ -4,9 +4,6 @@ from lxml import etree from ..treebuilders.etree import tag_regexp -from gettext import gettext -_ = gettext - from . import _base from .. import ihatexml @@ -130,7 +127,7 @@ def __init__(self, tree): def getNodeDetails(self, node): if isinstance(node, tuple): # Text node node, key = node - assert key in ("text", "tail"), _("Text nodes are text or tail, found %s") % key + assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key return _base.TEXT, ensure_str(getattr(node, key)) elif isinstance(node, Root): @@ -169,7 +166,7 @@ def getNodeDetails(self, node): attrs, len(node) > 0 or node.text) def getFirstChild(self, node): - assert not isinstance(node, tuple), _("Text nodes have no children") + assert not isinstance(node, tuple), "Text nodes have no children" assert len(node) or node.text, "Node has no children" if node.text: @@ -180,7 +177,7 @@ def getFirstChild(self, node): def getNextSibling(self, node): if isinstance(node, tuple): # Text node node, key = node - assert key in ("text", "tail"), _("Text nodes are text or tail, found %s") % key + assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key if key == "text": # XXX: we cannot use a "bool(node) and node[0] or None" construct here # because node[0] might evaluate to False if it has no child element @@ -196,7 +193,7 @@ def getNextSibling(self, node): def getParentNode(self, node): if isinstance(node, tuple): # Text node node, key = node - assert key in ("text", "tail"), _("Text nodes are text or tail, found %s") % key + assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key if key == "text": return node # else: fallback to "normal" processing diff --git a/lib/html5lib/utils.py b/lib/html5lib/utils.py index 2f41f4dfa6..fdc18febb5 100644 --- a/lib/html5lib/utils.py +++ b/lib/html5lib/utils.py @@ -2,6 +2,8 @@ from types import ModuleType +from six import text_type + try: import xml.etree.cElementTree as default_etree except ImportError: @@ -9,7 +11,26 @@ __all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair", - 
"surrogatePairToCodepoint", "moduleFactoryFactory"] + "surrogatePairToCodepoint", "moduleFactoryFactory", + "supports_lone_surrogates"] + + +# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be +# caught by the below test. In general this would be any platform +# using UTF-16 as its encoding of unicode strings, such as +# Jython. This is because UTF-16 itself is based on the use of such +# surrogates, and there is no mechanism to further escape such +# escapes. +try: + _x = eval('"\\uD800"') + if not isinstance(_x, text_type): + # We need this with u"" because of http://bugs.jython.org/issue2039 + _x = eval('u"\\uD800"') + assert isinstance(_x, text_type) +except: + supports_lone_surrogates = False +else: + supports_lone_surrogates = True class MethodDispatcher(dict): diff --git a/lib/httplib2/__init__.py b/lib/httplib2/__init__.py index b88eb3f7cb..6fa3cc60e5 100644 --- a/lib/httplib2/__init__.py +++ b/lib/httplib2/__init__.py @@ -3,7 +3,7 @@ httplib2 A caching http interface that supports ETags and gzip -to conserve bandwidth. +to conserve bandwidth. Requires Python 2.3 or later @@ -15,17 +15,17 @@ __author__ = "Joe Gregorio (joe@bitworking.org)" __copyright__ = "Copyright 2006, Joe Gregorio" __contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)", - "James Antill", - "Xavier Verges Farrero", - "Jonathan Feinberg", - "Blair Zajac", - "Sam Ruby", - "Louis Nyffenegger"] + "James Antill", + "Xavier Verges Farrero", + "Jonathan Feinberg", + "Blair Zajac", + "Sam Ruby", + "Louis Nyffenegger"] __license__ = "MIT" -__version__ = "$Rev$" +__version__ = "0.9.2" -import re -import sys +import re +import sys import email import email.Utils import email.Message @@ -35,6 +35,7 @@ import zlib import httplib import urlparse +import urllib import base64 import os import copy @@ -42,10 +43,10 @@ import time import random import errno -# remove depracated warning in python2.6 try: from hashlib import sha1 as _sha, md5 as _md5 except ImportError: + # prior to Python 2.5, these were separate modules import sha import md5 _sha = sha.new @@ -54,22 +55,38 @@ from gettext import gettext as _ import socket -# Try using local version, followed by system, and none if neither are found try: - import socks as socks + from httplib2 import socks except ImportError: try: - import socks as socks - except ImportError: + import socks + except (ImportError, AttributeError): socks = None # Build the appropriate socket wrapper for ssl try: import ssl # python 2.6 ssl_SSLError = ssl.SSLError - _ssl_wrap_socket = ssl.wrap_socket -except ImportError: - def _ssl_wrap_socket(sock, key_file, cert_file): + def _ssl_wrap_socket(sock, key_file, cert_file, + disable_validation, ca_certs): + if disable_validation: + cert_reqs = ssl.CERT_NONE + else: + cert_reqs = ssl.CERT_REQUIRED + # We should be specifying SSL version 3 or TLS v1, but the ssl module + # doesn't expose the necessary knobs. So we need to go with the default + # of SSLv23. + return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file, + cert_reqs=cert_reqs, ca_certs=ca_certs) +except (AttributeError, ImportError): + ssl_SSLError = None + def _ssl_wrap_socket(sock, key_file, cert_file, + disable_validation, ca_certs): + if not disable_validation: + raise CertificateValidationUnsupported( + "SSL certificate validation is not supported without " + "the ssl module installed. 
To avoid this error, install " + "the ssl module, or explicity disable validation.") ssl_sock = socket.ssl(sock, key_file, cert_file) return httplib.FakeSocket(sock, ssl_sock) @@ -85,15 +102,19 @@ def has_timeout(timeout): # python 2.6 return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT) return (timeout is not None) -__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error', - 'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent', - 'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError', - 'debuglevel'] +__all__ = [ + 'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation', + 'RedirectLimit', 'FailedToDecompressContent', + 'UnimplementedDigestAuthOptionError', + 'UnimplementedHmacDigestAuthOptionError', + 'debuglevel', 'ProxiesUnavailableError'] # The httplib debug level, set to a non-zero value to get debug output debuglevel = 0 +# A request will be tried 'RETRIES' times if it fails at the socket/connection level. +RETRIES = 2 # Python 2.3 support if sys.version_info < (2,4): @@ -114,8 +135,8 @@ def HTTPResponse__getheaders(self): # All exceptions raised here derive from HttpLib2Error class HttpLib2Error(Exception): pass -# Some exceptions can be caught and optionally -# be turned back into responses. +# Some exceptions can be caught and optionally +# be turned back into responses. class HttpLib2ErrorWithResponse(HttpLib2Error): def __init__(self, desc, response, content): self.response = response @@ -128,8 +149,18 @@ class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass +class MalformedHeader(HttpLib2Error): pass class RelativeURIError(HttpLib2Error): pass class ServerNotFoundError(HttpLib2Error): pass +class ProxiesUnavailableError(HttpLib2Error): pass +class CertificateValidationUnsupported(HttpLib2Error): pass +class SSLHandshakeError(HttpLib2Error): pass +class NotSupportedOnThisPlatform(HttpLib2Error): pass +class CertificateHostnameMismatch(SSLHandshakeError): + def __init__(self, desc, host, cert): + HttpLib2Error.__init__(self, desc) + self.host = host + self.cert = cert # Open Items: # ----------- @@ -153,6 +184,16 @@ class ServerNotFoundError(HttpLib2Error): pass # requesting that URI again. DEFAULT_MAX_REDIRECTS = 5 +try: + # Users can optionally provide a module that tells us where the CA_CERTS + # are located. + import ca_certs_locater + CA_CERTS = ca_certs_locater.get() +except ImportError: + # Default CA certificates file bundled with httplib2. + CA_CERTS = os.path.join( + os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt") + # Which headers are hop-by-hop headers by default HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade'] @@ -177,7 +218,7 @@ def urlnorm(uri): raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri) authority = authority.lower() scheme = scheme.lower() - if not path: + if not path: path = "/" # Could do syntax based normalization of the URI before # computing the digest. See Section 6.2.2 of Std 66. 
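[Editor's note: the hunks above add certificate validation to httplib2's SSL socket wrapper, a bundled cacerts.txt fallback, and new exception types such as SSLHandshakeError and CertificateHostnameMismatch. A minimal usage sketch of the patched client follows; the URL and the error handling shown are illustrative assumptions, not part of the patch.]

    import httplib2

    # The CA bundle shipped as cacerts.txt is used by default, so HTTPS
    # requests now validate the server certificate unless validation is
    # explicitly disabled. The URL below is an example value.
    h = httplib2.Http()
    try:
        response, content = h.request("https://example.org/")
    except httplib2.CertificateHostnameMismatch, e:
        # New exception: carries the offending hostname and certificate.
        print "certificate does not match host %s" % e.host
    except httplib2.SSLHandshakeError, e:
        print "SSL handshake or certificate validation failed: %s" % e

[The more specific CertificateHostnameMismatch is caught before its base class SSLHandshakeError; passing disable_ssl_certificate_validation=True to Http() would skip both checks.]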
@@ -229,7 +270,7 @@ def _parse_cache_control(headers): parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")] parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")] retval = dict(parts_with_args + parts_wo_args) - return retval + return retval # Whether to use a strict mode to parse WWW-Authenticate headers # Might lead to bad results in case of ill-formed header value, @@ -250,25 +291,30 @@ def _parse_www_authenticate(headers, headername='www-authenticate'): per auth_scheme.""" retval = {} if headers.has_key(headername): - authenticate = headers[headername].strip() - www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED - while authenticate: - # Break off the scheme at the beginning of the line - if headername == 'authentication-info': - (auth_scheme, the_rest) = ('digest', authenticate) - else: - (auth_scheme, the_rest) = authenticate.split(" ", 1) - # Now loop over all the key value pairs that come after the scheme, - # being careful not to roll into the next scheme - match = www_auth.search(the_rest) - auth_params = {} - while match: - if match and len(match.groups()) == 3: - (key, value, the_rest) = match.groups() - auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')]) + try: + + authenticate = headers[headername].strip() + www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED + while authenticate: + # Break off the scheme at the beginning of the line + if headername == 'authentication-info': + (auth_scheme, the_rest) = ('digest', authenticate) + else: + (auth_scheme, the_rest) = authenticate.split(" ", 1) + # Now loop over all the key value pairs that come after the scheme, + # being careful not to roll into the next scheme match = www_auth.search(the_rest) - retval[auth_scheme.lower()] = auth_params - authenticate = the_rest.strip() + auth_params = {} + while match: + if match and len(match.groups()) == 3: + (key, value, the_rest) = match.groups() + auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')]) + match = www_auth.search(the_rest) + retval[auth_scheme.lower()] = auth_params + authenticate = the_rest.strip() + + except ValueError: + raise MalformedHeader("WWW-Authenticate") return retval @@ -280,17 +326,17 @@ def _entry_disposition(response_headers, request_headers): 1. Cache-Control: max-stale 2. Age: headers are not used in the calculations. - Not that this algorithm is simpler than you might think + Not that this algorithm is simpler than you might think because we are operating as a private (non-shared) cache. This lets us ignore 's-maxage'. We can also ignore 'proxy-invalidate' since we aren't a proxy. - We will never return a stale document as - fresh as a design decision, and thus the non-implementation - of 'max-stale'. This also lets us safely ignore 'must-revalidate' + We will never return a stale document as + fresh as a design decision, and thus the non-implementation + of 'max-stale'. This also lets us safely ignore 'must-revalidate' since we operate as if every server has sent 'must-revalidate'. Since we are private we get to ignore both 'public' and 'private' parameters. We also ignore 'no-transform' since - we don't do any transformations. + we don't do any transformations. The 'no-store' parameter is handled at a higher level. 
So the only Cache-Control parameters we look at are: @@ -299,7 +345,7 @@ def _entry_disposition(response_headers, request_headers): max-age min-fresh """ - + retval = "STALE" cc = _parse_cache_control(request_headers) cc_response = _parse_cache_control(response_headers) @@ -341,10 +387,10 @@ def _entry_disposition(response_headers, request_headers): min_fresh = int(cc['min-fresh']) except ValueError: min_fresh = 0 - current_age += min_fresh + current_age += min_fresh if freshness_lifetime > current_age: retval = "FRESH" - return retval + return retval def _decompressContent(response, new_content): content = new_content @@ -392,7 +438,7 @@ def _updateCache(request_headers, response_headers, content, cache, cachekey): if status == 304: status = 200 - status_header = 'status: %d\r\n' % response_headers.status + status_header = 'status: %d\r\n' % status header_str = info.as_string() @@ -409,10 +455,10 @@ def _wsse_username_token(cnonce, iso_now, password): return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip() -# For credentials we need two things, first +# For credentials we need two things, first # a pool of credential to try (not necesarily tied to BAsic, Digest, etc.) # Then we also need a list of URIs that have already demanded authentication -# That list is tricky since sub-URIs can take the same auth, or the +# That list is tricky since sub-URIs can take the same auth, or the # auth scheme may change as you descend the tree. # So we also need each Auth instance to be able to tell us # how close to the 'top' it is. @@ -436,7 +482,7 @@ def inscope(self, host, request_uri): def request(self, method, request_uri, headers, content): """Modify the request headers to add the appropriate - Authorization header. Over-rise this in sub-classes.""" + Authorization header. Over-ride this in sub-classes.""" pass def response(self, response, content): @@ -444,7 +490,7 @@ def response(self, response, content): or such returned from the last authorized response. Over-rise this in sub-classes if necessary. - Return TRUE is the request is to be retried, for + Return TRUE is the request is to be retried, for example Digest may return stale=true. """ return False @@ -462,7 +508,7 @@ def request(self, method, request_uri, headers, content): class DigestAuthentication(Authentication): - """Only do qop='auth' and MD5, since that + """Only do qop='auth' and MD5, since that is all Apache currently implements""" def __init__(self, credentials, host, request_uri, headers, response, content, http): Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) @@ -475,7 +521,7 @@ def __init__(self, credentials, host, request_uri, headers, response, content, h self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper() if self.challenge['algorithm'] != 'MD5': raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." 
% self.challenge['algorithm'])) - self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]]) + self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]]) self.challenge['nc'] = 1 def request(self, method, request_uri, headers, content, cnonce = None): @@ -483,23 +529,24 @@ def request(self, method, request_uri, headers, content, cnonce = None): H = lambda x: _md5(x).hexdigest() KD = lambda s, d: H("%s:%s" % (s, d)) A2 = "".join([method, ":", request_uri]) - self.challenge['cnonce'] = cnonce or _cnonce() - request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (self.challenge['nonce'], - '%08x' % self.challenge['nc'], - self.challenge['cnonce'], - self.challenge['qop'], H(A2) - )) - headers['Authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % ( - self.credentials[0], + self.challenge['cnonce'] = cnonce or _cnonce() + request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % ( + self.challenge['nonce'], + '%08x' % self.challenge['nc'], + self.challenge['cnonce'], + self.challenge['qop'], H(A2))) + headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % ( + self.credentials[0], self.challenge['realm'], self.challenge['nonce'], - request_uri, + request_uri, self.challenge['algorithm'], request_digest, self.challenge['qop'], self.challenge['nc'], - self.challenge['cnonce'], - ) + self.challenge['cnonce']) + if self.challenge.get('opaque'): + headers['authorization'] += ', opaque="%s"' % self.challenge['opaque'] self.challenge['nc'] += 1 def response(self, response, content): @@ -507,14 +554,14 @@ def response(self, response, content): challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {}) if 'true' == challenge.get('stale'): self.challenge['nonce'] = challenge['nonce'] - self.challenge['nc'] = 1 + self.challenge['nc'] = 1 return True else: updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {}) if updated_challenge.has_key('nextnonce'): self.challenge['nonce'] = updated_challenge['nextnonce'] - self.challenge['nc'] = 1 + self.challenge['nc'] = 1 return False @@ -548,9 +595,8 @@ def __init__(self, credentials, host, request_uri, headers, response, content, h else: self.pwhashmod = _sha self.key = "".join([self.credentials[0], ":", - self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(), - ":", self.challenge['realm'] - ]) + self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(), + ":", self.challenge['realm']]) self.key = self.pwhashmod.new(self.key).hexdigest().lower() def request(self, method, request_uri, headers, content): @@ -562,16 +608,15 @@ def request(self, method, request_uri, headers, content): cnonce = _cnonce() request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val) request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower() - headers['Authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % ( - self.credentials[0], + headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % ( + self.credentials[0], self.challenge['realm'], self.challenge['snonce'], 
cnonce, - request_uri, + request_uri, created, request_digest, - keylist, - ) + keylist) def response(self, response, content): challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {}) @@ -584,7 +629,7 @@ class WsseAuthentication(Authentication): """This is thinly tested and should not be relied upon. At this time there isn't any third party server to test against. Blogger and TypePad implemented this algorithm at one point - but Blogger has since switched to Basic over HTTPS and + but Blogger has since switched to Basic over HTTPS and TypePad has implemented it wrong, by never issuing a 401 challenge but instead requiring your client to telepathically know that their endpoint is expecting WSSE profile="UsernameToken".""" @@ -594,7 +639,7 @@ def __init__(self, credentials, host, request_uri, headers, response, content, h def request(self, method, request_uri, headers, content): """Modify the request headers to add the appropriate Authorization header.""" - headers['Authorization'] = 'WSSE profile="UsernameToken"' + headers['authorization'] = 'WSSE profile="UsernameToken"' iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) cnonce = _cnonce() password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1]) @@ -630,7 +675,7 @@ def __init__(self, credentials, host, request_uri, headers, response, content, h def request(self, method, request_uri, headers, content): """Modify the request headers to add the appropriate Authorization header.""" - headers['authorization'] = 'GoogleLogin Auth=' + self.Auth + headers['authorization'] = 'GoogleLogin Auth=' + self.Auth AUTH_SCHEME_CLASSES = { @@ -645,13 +690,13 @@ def request(self, method, request_uri, headers, content): class FileCache(object): """Uses a local directory as a store for cached files. - Not really safe to use if multiple threads or processes are going to + Not really safe to use if multiple threads or processes are going to be running on the same cache. """ def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior self.cache = cache self.safe = safe - if not os.path.exists(cache): + if not os.path.exists(cache): os.makedirs(self.cache) def get(self, key): @@ -661,7 +706,7 @@ def get(self, key): f = file(cacheFullPath, "rb") retval = f.read() f.close() - except IOError, e: + except IOError: pass return retval @@ -689,34 +734,142 @@ def clear(self): def iter(self, domain): for (cdomain, name, password) in self.credentials: if cdomain == "" or domain == cdomain: - yield (name, password) + yield (name, password) class KeyCerts(Credentials): """Identical to Credentials except that name/password are mapped to key/cert.""" pass +class AllHosts(object): + pass class ProxyInfo(object): - """Collect information required to use a proxy.""" - def __init__(self, proxy_type, proxy_host, proxy_port, proxy_rdns=None, proxy_user=None, proxy_pass=None): - """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX - constants. For example: + """Collect information required to use a proxy.""" + bypass_hosts = () + + def __init__(self, proxy_type, proxy_host, proxy_port, + proxy_rdns=True, proxy_user=None, proxy_pass=None): + """ + Args: + proxy_type: The type of proxy server. This must be set to one of + socks.PROXY_TYPE_XXX constants. For example: + + p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, + proxy_host='localhost', proxy_port=8000) + + proxy_host: The hostname or IP address of the proxy server. 
+ + proxy_port: The port that the proxy server is running on. + + proxy_rdns: If True (default), DNS queries will not be performed + locally, and instead, handed to the proxy to resolve. This is useful + if the network does not allow resolution of non-local names. In + httplib2 0.9 and earlier, this defaulted to False. + + proxy_user: The username used to authenticate with the proxy server. + + proxy_pass: The password used to authenticate with the proxy server. + """ + self.proxy_type = proxy_type + self.proxy_host = proxy_host + self.proxy_port = proxy_port + self.proxy_rdns = proxy_rdns + self.proxy_user = proxy_user + self.proxy_pass = proxy_pass + + def astuple(self): + return (self.proxy_type, self.proxy_host, self.proxy_port, + self.proxy_rdns, self.proxy_user, self.proxy_pass) + + def isgood(self): + return (self.proxy_host != None) and (self.proxy_port != None) + + def applies_to(self, hostname): + return not self.bypass_host(hostname) + + def bypass_host(self, hostname): + """Has this host been excluded from the proxy config""" + if self.bypass_hosts is AllHosts: + return True -p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', proxy_port=8000) - """ - self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass = proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass + bypass = False + for domain in self.bypass_hosts: + if hostname.endswith(domain): + bypass = True - def astuple(self): - return (self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, - self.proxy_user, self.proxy_pass) + return bypass - def isgood(self): - return socks and (self.proxy_host != None) and (self.proxy_port != None) + +def proxy_info_from_environment(method='http'): + """ + Read proxy info from the environment variables. + """ + if method not in ['http', 'https']: + return + + env_var = method + '_proxy' + url = os.environ.get(env_var, os.environ.get(env_var.upper())) + if not url: + return + pi = proxy_info_from_url(url, method) + + no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', '')) + bypass_hosts = [] + if no_proxy: + bypass_hosts = no_proxy.split(',') + # special case, no_proxy=* means all hosts bypassed + if no_proxy == '*': + bypass_hosts = AllHosts + + pi.bypass_hosts = bypass_hosts + return pi + +def proxy_info_from_url(url, method='http'): + """ + Construct a ProxyInfo from a URL (such as http_proxy env var) + """ + url = urlparse.urlparse(url) + username = None + password = None + port = None + if '@' in url[1]: + ident, host_port = url[1].split('@', 1) + if ':' in ident: + username, password = ident.split(':', 1) + else: + password = ident + else: + host_port = url[1] + if ':' in host_port: + host, port = host_port.split(':', 1) + else: + host = host_port + + if port: + port = int(port) + else: + port = dict(https=443, http=80)[method] + + proxy_type = 3 # socks.PROXY_TYPE_HTTP + return ProxyInfo( + proxy_type = proxy_type, + proxy_host = host, + proxy_port = port, + proxy_user = username or None, + proxy_pass = password or None, + ) class HTTPConnectionWithTimeout(httplib.HTTPConnection): - """HTTPConnection subclass that supports timeouts""" + """ + HTTPConnection subclass that supports timeouts + + All timeouts are in seconds. If None is passed for timeout then + Python's default timeout for sockets will be used. 
See for example + the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + """ def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None): httplib.HTTPConnection.__init__(self, host, port, strict) @@ -726,27 +879,46 @@ def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None): def connect(self): """Connect to the host and port specified in __init__.""" # Mostly verbatim from httplib.py. + if self.proxy_info and socks is None: + raise ProxiesUnavailableError( + 'Proxy support missing but proxy use was requested!') msg = "getaddrinfo returns an empty list" - for res in socket.getaddrinfo(self.host, self.port, 0, - socket.SOCK_STREAM): + if self.proxy_info and self.proxy_info.isgood(): + use_proxy = True + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() + + host = proxy_host + port = proxy_port + else: + use_proxy = False + + host = self.host + port = self.port + + for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res try: - if self.proxy_info and self.proxy_info.isgood(): + if use_proxy: self.sock = socks.socksocket(af, socktype, proto) - self.sock.setproxy(*self.proxy_info.astuple()) + self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) else: self.sock = socket.socket(af, socktype, proto) + self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) # Different from httplib: support timeouts. if has_timeout(self.timeout): self.sock.settimeout(self.timeout) # End of difference from httplib. if self.debuglevel > 0: - print "connect: (%s, %s)" % (self.host, self.port) + print "connect: (%s, %s) ************" % (self.host, self.port) + if use_proxy: + print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) - self.sock.connect(sa) + self.sock.connect((self.host, self.port) + sa[2:]) except socket.error, msg: if self.debuglevel > 0: - print 'connect fail:', (self.host, self.port) + print "connect fail: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) if self.sock: self.sock.close() self.sock = None @@ -756,56 +928,267 @@ def connect(self): raise socket.error, msg class HTTPSConnectionWithTimeout(httplib.HTTPSConnection): - "This class allows communication via SSL." + """ + This class allows communication via SSL. + All timeouts are in seconds. If None is passed for timeout then + Python's default timeout for sockets will be used. 
See for example + the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + """ def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=None, timeout=None, proxy_info=None): - httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file, - cert_file=cert_file, strict=strict) + strict=None, timeout=None, proxy_info=None, + ca_certs=None, disable_ssl_certificate_validation=False): + httplib.HTTPSConnection.__init__(self, host, port=port, + key_file=key_file, + cert_file=cert_file, strict=strict) self.timeout = timeout self.proxy_info = proxy_info + if ca_certs is None: + ca_certs = CA_CERTS + self.ca_certs = ca_certs + self.disable_ssl_certificate_validation = \ + disable_ssl_certificate_validation + + # The following two methods were adapted from https_wrapper.py, released + # with the Google Appengine SDK at + # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py + # under the following license: + # + # Copyright 2007 Google Inc. + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # + + def _GetValidHostsForCert(self, cert): + """Returns a list of valid host globs for an SSL certificate. + + Args: + cert: A dictionary representing an SSL certificate. + Returns: + list: A list of valid host globs. + """ + if 'subjectAltName' in cert: + return [x[1] for x in cert['subjectAltName'] + if x[0].lower() == 'dns'] + else: + return [x[0][1] for x in cert['subject'] + if x[0][0].lower() == 'commonname'] + + def _ValidateCertificateHostname(self, cert, hostname): + """Validates that a given hostname is valid for an SSL certificate. + + Args: + cert: A dictionary representing an SSL certificate. + hostname: The hostname to test. + Returns: + bool: Whether or not the hostname is valid for this certificate. + """ + hosts = self._GetValidHostsForCert(cert) + for host in hosts: + host_re = host.replace('.', '\.').replace('*', '[^.]*') + if re.search('^%s$' % (host_re,), hostname, re.I): + return True + return False def connect(self): "Connect to a host on a given (SSL) port." 
+ msg = "getaddrinfo returns an empty list" if self.proxy_info and self.proxy_info.isgood(): - sock = socks.socksocket(socket.AF_INET, socket.SOCK_STREAM) - sock.setproxy(*self.proxy_info.astuple()) + use_proxy = True + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() + + host = proxy_host + port = proxy_port else: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - - if has_timeout(self.timeout): - sock.settimeout(self.timeout) - sock.connect((self.host, self.port)) - self.sock =_ssl_wrap_socket(sock, self.key_file, self.cert_file) + use_proxy = False + host = self.host + port = self.port + + address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM) + for family, socktype, proto, canonname, sockaddr in address_info: + try: + if use_proxy: + sock = socks.socksocket(family, socktype, proto) + + sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) + else: + sock = socket.socket(family, socktype, proto) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + if has_timeout(self.timeout): + sock.settimeout(self.timeout) + sock.connect((self.host, self.port)) + self.sock =_ssl_wrap_socket( + sock, self.key_file, self.cert_file, + self.disable_ssl_certificate_validation, self.ca_certs) + if self.debuglevel > 0: + print "connect: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + if not self.disable_ssl_certificate_validation: + cert = self.sock.getpeercert() + hostname = self.host.split(':', 0)[0] + if not self._ValidateCertificateHostname(cert, hostname): + raise CertificateHostnameMismatch( + 'Server presented certificate that does not match ' + 'host %s: %s' % (hostname, cert), hostname, cert) + except ssl_SSLError, e: + if sock: + sock.close() + if self.sock: + self.sock.close() + self.sock = None + # Unfortunately the ssl module doesn't seem to provide any way + # to get at more detailed error information, in particular + # whether the error is due to certificate validation or + # something else (such as SSL protocol mismatch). + if e.errno == ssl.SSL_ERROR_SSL: + raise SSLHandshakeError(e) + else: + raise + except (socket.timeout, socket.gaierror): + raise + except socket.error, msg: + if self.debuglevel > 0: + print "connect fail: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket.error, msg + +SCHEME_TO_CONNECTION = { + 'http': HTTPConnectionWithTimeout, + 'https': HTTPSConnectionWithTimeout +} + +# Use a different connection object for Google App Engine +try: + try: + from google.appengine.api import apiproxy_stub_map + if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: + raise ImportError # Bail out; we're not actually running on App Engine. + from google.appengine.api.urlfetch import fetch + from google.appengine.api.urlfetch import InvalidURLError + except (ImportError, AttributeError): + from google3.apphosting.api import apiproxy_stub_map + if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: + raise ImportError # Bail out; we're not actually running on App Engine. 
+ from google3.apphosting.api.urlfetch import fetch + from google3.apphosting.api.urlfetch import InvalidURLError + + def _new_fixed_fetch(validate_certificate): + def fixed_fetch(url, payload=None, method="GET", headers={}, + allow_truncated=False, follow_redirects=True, + deadline=None): + if deadline is None: + deadline = socket.getdefaulttimeout() or 5 + return fetch(url, payload=payload, method=method, headers=headers, + allow_truncated=allow_truncated, + follow_redirects=follow_redirects, deadline=deadline, + validate_certificate=validate_certificate) + return fixed_fetch + + class AppEngineHttpConnection(httplib.HTTPConnection): + """Use httplib on App Engine, but compensate for its weirdness. + + The parameters key_file, cert_file, proxy_info, ca_certs, and + disable_ssl_certificate_validation are all dropped on the ground. + """ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None, ca_certs=None, + disable_ssl_certificate_validation=False): + httplib.HTTPConnection.__init__(self, host, port=port, + strict=strict, timeout=timeout) + + class AppEngineHttpsConnection(httplib.HTTPSConnection): + """Same as AppEngineHttpConnection, but for HTTPS URIs.""" + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None, ca_certs=None, + disable_ssl_certificate_validation=False): + httplib.HTTPSConnection.__init__(self, host, port=port, + key_file=key_file, + cert_file=cert_file, strict=strict, + timeout=timeout) + self._fetch = _new_fixed_fetch( + not disable_ssl_certificate_validation) + + # Update the connection classes to use the Googel App Engine specific ones. + SCHEME_TO_CONNECTION = { + 'http': AppEngineHttpConnection, + 'https': AppEngineHttpsConnection + } +except (ImportError, AttributeError): + pass class Http(object): """An HTTP client that handles: -- all methods -- caching -- ETags -- compression, -- HTTPS -- Basic -- Digest -- WSSE - -and more. - """ - def __init__(self, cache=None, timeout=None, proxy_info=None): - """The value of proxy_info is a ProxyInfo instance. -If 'cache' is a string then it is used as a directory name -for a disk cache. Otherwise it must be an object that supports -the same interface as FileCache.""" + - all methods + - caching + - ETags + - compression, + - HTTPS + - Basic + - Digest + - WSSE + + and more. + """ + def __init__(self, cache=None, timeout=None, + proxy_info=proxy_info_from_environment, + ca_certs=None, disable_ssl_certificate_validation=False): + """If 'cache' is a string then it is used as a directory name for + a disk cache. Otherwise it must be an object that supports the + same interface as FileCache. + + All timeouts are in seconds. If None is passed for timeout + then Python's default timeout for sockets will be used. See + for example the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + + `proxy_info` may be: + - a callable that takes the http scheme ('http' or 'https') and + returns a ProxyInfo instance per request. By default, uses + proxy_nfo_from_environment. + - a ProxyInfo instance (static proxy config). + - None (proxy disabled). + + ca_certs is the path of a file containing root CA certificates for SSL + server certificate validation. By default, a CA cert file bundled with + httplib2 is used. + + If disable_ssl_certificate_validation is true, SSL cert validation will + not be performed. 
+ """ self.proxy_info = proxy_info + self.ca_certs = ca_certs + self.disable_ssl_certificate_validation = \ + disable_ssl_certificate_validation + # Map domain name to an httplib connection self.connections = {} # The location of the cache, for now a directory # where cached responses are held. - if cache and isinstance(cache, str): + if cache and isinstance(cache, basestring): self.cache = FileCache(cache) else: self.cache = cache @@ -821,10 +1204,10 @@ def __init__(self, cache=None, timeout=None, proxy_info=None): # If set to False then no redirects are followed, even safe ones. self.follow_redirects = True - + # Which HTTP methods do we apply optimistic concurrency to, i.e. # which methods get an "if-match:" etag header added to them. - self.optimistic_concurrency_methods = ["PUT"] + self.optimistic_concurrency_methods = ["PUT", "PATCH"] # If 'follow_redirects' is True, and this is set to True then # all redirecs are followed, including unsafe ones. @@ -832,10 +1215,27 @@ def __init__(self, cache=None, timeout=None, proxy_info=None): self.ignore_etag = False - self.force_exception_to_status_code = False + self.force_exception_to_status_code = False self.timeout = timeout + # Keep Authorization: headers on a redirect. + self.forward_authorization_headers = False + + def __getstate__(self): + state_dict = copy.copy(self.__dict__) + # In case request is augmented by some foreign object such as + # credentials which handle auth + if 'request' in state_dict: + del state_dict['request'] + if 'connections' in state_dict: + del state_dict['connections'] + return state_dict + + def __setstate__(self, state): + self.__dict__.update(state) + self.connections = {} + def _auth_from_challenge(self, host, request_uri, headers, response, content): """A generator that creates Authorization objects that can be applied to requests. @@ -863,10 +1263,13 @@ def clear_credentials(self): self.authorizations = [] def _conn_request(self, conn, request_uri, method, body, headers): - for i in range(2): + i = 0 + seen_bad_status_line = False + while i < RETRIES: + i += 1 try: - if conn.sock is None: - conn.connect() + if hasattr(conn, 'sock') and conn.sock is None: + conn.connect() conn.request(method, request_uri, body, headers) except socket.timeout: raise @@ -882,36 +1285,51 @@ def _conn_request(self, conn, request_uri, method, body, headers): err = getattr(e, 'args')[0] else: err = e.errno - if err == errno.ECONNREFUSED: # Connection refused - raise + if err in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES: + continue # retry on potentially transient socket errors + raise except httplib.HTTPException: # Just because the server closed the connection doesn't apparently mean # that the server didn't send a response. - if conn.sock is None: - if i == 0: + if hasattr(conn, 'sock') and conn.sock is None: + if i < RETRIES-1: conn.close() conn.connect() continue else: conn.close() raise - if i == 0: + if i < RETRIES-1: conn.close() conn.connect() continue try: response = conn.getresponse() + except httplib.BadStatusLine: + # If we get a BadStatusLine on the first try then that means + # the connection just went stale, so retry regardless of the + # number of RETRIES set. 
+ if not seen_bad_status_line and i == 1: + i = 0 + seen_bad_status_line = True + conn.close() + conn.connect() + continue + else: + conn.close() + raise except (socket.error, httplib.HTTPException): - if i == 0: + if i < RETRIES-1: conn.close() conn.connect() continue else: + conn.close() raise else: content = "" if method == "HEAD": - response.close() + conn.close() else: content = response.read() response = Response(response) @@ -927,12 +1345,12 @@ def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)] auth = auths and sorted(auths)[0][1] or None - if auth: + if auth: auth.request(method, request_uri, headers, body) (response, content) = self._conn_request(conn, request_uri, method, body, headers) - if auth: + if auth: if auth.response(response, body): auth.request(method, request_uri, headers, body) (response, content) = self._conn_request(conn, request_uri, method, body, headers ) @@ -940,7 +1358,7 @@ def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, if response.status == 401: for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): - authorization.request(method, request_uri, headers, body) + authorization.request(method, request_uri, headers, body) (response, content) = self._conn_request(conn, request_uri, method, body, headers, ) if response.status != 401: self.authorizations.append(authorization) @@ -963,26 +1381,34 @@ def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, if response.status == 301 and method in ["GET", "HEAD"]: response['-x-permanent-redirect-url'] = response['location'] if not response.has_key('content-location'): - response['content-location'] = absolute_uri + response['content-location'] = absolute_uri _updateCache(headers, response, content, self.cache, cachekey) if headers.has_key('if-none-match'): del headers['if-none-match'] if headers.has_key('if-modified-since'): del headers['if-modified-since'] + if 'authorization' in headers and not self.forward_authorization_headers: + del headers['authorization'] if response.has_key('location'): location = response['location'] old_response = copy.deepcopy(response) if not old_response.has_key('content-location'): - old_response['content-location'] = absolute_uri - redirect_method = ((response.status == 303) and (method not in ["GET", "HEAD"])) and "GET" or method - (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1) + old_response['content-location'] = absolute_uri + redirect_method = method + if response.status in [302, 303]: + redirect_method = "GET" + body = None + (response, content) = self.request( + location, method=redirect_method, + body=body, headers=headers, + redirections=redirections - 1) response.previous = old_response else: - raise RedirectLimit( _("Redirected more times than rediection_limit allows."), response, content) - elif response.status in [200, 203] and method == "GET": + raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content) + elif response.status in [200, 203] and method in ["GET", "HEAD"]: # Don't cache 206's since we aren't going to handle byte range requests if not response.has_key('content-location'): - response['content-location'] = absolute_uri + response['content-location'] = absolute_uri _updateCache(headers, response, content, self.cache, 
cachekey) return (response, content) @@ -997,24 +1423,25 @@ def _normalize_headers(self, headers): def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None): """ Performs a single HTTP request. -The 'uri' is the URI of the HTTP resource and can begin -with either 'http' or 'https'. The value of 'uri' must be an absolute URI. -The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. -There is no restriction on the methods allowed. + The 'uri' is the URI of the HTTP resource and can begin with either + 'http' or 'https'. The value of 'uri' must be an absolute URI. -The 'body' is the entity body to be sent with the request. It is a string -object. + The 'method' is the HTTP method to perform, such as GET, POST, DELETE, + etc. There is no restriction on the methods allowed. -Any extra headers that are to be sent with the request should be provided in the -'headers' dictionary. + The 'body' is the entity body to be sent with the request. It is a + string object. -The maximum number of redirect to follow before raising an -exception is 'redirections. The default is 5. + Any extra headers that are to be sent with the request should be + provided in the 'headers' dictionary. -The return value is a tuple of (response, content), the first -being and instance of the 'Response' class, the second being -a string that contains the response entity body. + The maximum number of redirect to follow before raising an + exception is 'redirections. The default is 5. + + The return value is a tuple of (response, content), the first + being and instance of the 'Response' class, the second being + a string that contains the response entity body. """ try: if headers is None: @@ -1023,7 +1450,7 @@ def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAU headers = self._normalize_headers(headers) if not headers.has_key('user-agent'): - headers['user-agent'] = "Python-httplib2/%s" % __version__ + headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__ uri = iri2uri(uri) @@ -1033,27 +1460,44 @@ def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAU scheme = 'https' authority = domain_port[0] + proxy_info = self._get_proxy_info(scheme, authority) + conn_key = scheme+":"+authority if conn_key in self.connections: conn = self.connections[conn_key] else: if not connection_type: - connection_type = (scheme == 'https') and HTTPSConnectionWithTimeout or HTTPConnectionWithTimeout + connection_type = SCHEME_TO_CONNECTION[scheme] certs = list(self.certificates.iter(authority)) - if scheme == 'https' and certs: - conn = self.connections[conn_key] = connection_type(authority, key_file=certs[0][0], - cert_file=certs[0][1], timeout=self.timeout, proxy_info=self.proxy_info) + if scheme == 'https': + if certs: + conn = self.connections[conn_key] = connection_type( + authority, key_file=certs[0][0], + cert_file=certs[0][1], timeout=self.timeout, + proxy_info=proxy_info, + ca_certs=self.ca_certs, + disable_ssl_certificate_validation= + self.disable_ssl_certificate_validation) + else: + conn = self.connections[conn_key] = connection_type( + authority, timeout=self.timeout, + proxy_info=proxy_info, + ca_certs=self.ca_certs, + disable_ssl_certificate_validation= + self.disable_ssl_certificate_validation) else: - conn = self.connections[conn_key] = connection_type(authority, timeout=self.timeout, proxy_info=self.proxy_info) + conn = self.connections[conn_key] = connection_type( + authority, 
timeout=self.timeout, + proxy_info=proxy_info) conn.set_debuglevel(debuglevel) - if method in ["GET", "HEAD"] and 'range' not in headers and 'accept-encoding' not in headers: + if 'range' not in headers and 'accept-encoding' not in headers: headers['accept-encoding'] = 'gzip, deflate' info = email.Message.Message() cached_value = None if self.cache: - cachekey = defrag_uri + cachekey = defrag_uri.encode('utf-8') cached_value = self.cache.get(cachekey) if cached_value: # info = email.message_from_string(cached_value) @@ -1067,7 +1511,7 @@ def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAU feedparser.feed(info) info = feedparser.close() feedparser._parse = None - except IndexError, ValueError: + except (IndexError, ValueError): self.cache.delete(cachekey) cachekey = None cached_value = None @@ -1090,27 +1534,31 @@ def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAU for header in vary_headers: key = '-varied-%s' % header value = info[key] - if headers.get(header, '') != value: - cached_value = None - break + if headers.get(header, None) != value: + cached_value = None + break if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers: if info.has_key('-x-permanent-redirect-url'): # Should cached permanent redirects be counted in our redirection count? For now, yes. - (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1) + if redirections <= 0: + raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "") + (response, new_content) = self.request( + info['-x-permanent-redirect-url'], method='GET', + headers=headers, redirections=redirections - 1) response.previous = Response(info) response.previous.fromcache = True else: # Determine our course of action: # Is the cached entry fresh or stale? # Has the client requested a non-cached response? - # - # There seems to be three possible answers: + # + # There seems to be three possible answers: # 1. [FRESH] Return the cache entry w/o doing a GET # 2. [STALE] Do the GET (but add in cache validators if available) # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request - entry_disposition = _entry_disposition(info, headers) - + entry_disposition = _entry_disposition(info, headers) + if entry_disposition == "FRESH": if not cached_value: info['status'] = '504' @@ -1132,7 +1580,7 @@ def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAU if response.status == 304 and method == "GET": # Rewrite the cache entry with the new end-to-end headers - # Take all headers that are in response + # Take all headers that are in response # and overwrite their values in info. # unless they are hop-by-hop, or are listed in the connection header. 
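[Editor's note: for reference, a small sketch of the caching path exercised by the hunk above. The ".cache" directory name and URL are illustrative assumptions; a string cache argument selects FileCache, and a 304 revalidation is merged back into the cached entry with fromcache set.]

    import httplib2

    # ".cache" and the URL are example values, not part of the patch.
    h = httplib2.Http(".cache")   # string cache argument -> FileCache
    response, content = h.request("http://example.org/", "GET")
    print response.fromcache      # False on the first, uncached fetch
    response, content = h.request("http://example.org/", "GET")
    # True when the server's headers allowed the entry to be cached and it
    # was served fresh or revalidated via If-None-Match / If-Modified-Since.
    print response.fromcache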
@@ -1144,14 +1592,14 @@ def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAU _updateCache(headers, merged_response, content, self.cache, cachekey) response = merged_response response.status = 200 - response.fromcache = True + response.fromcache = True elif response.status == 200: content = new_content else: self.cache.delete(cachekey) - content = new_content - else: + content = new_content + else: cc = _parse_cache_control(headers) if cc.has_key('only-if-cached'): info['status'] = '504' @@ -1165,34 +1613,47 @@ def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAU response = e.response content = e.content response.status = 500 - response.reason = str(e) - elif isinstance(e, socket.timeout) or (isinstance(e, socket.error) and 'timed out' in str(e)): + response.reason = str(e) + elif isinstance(e, socket.timeout): content = "Request Timeout" - response = Response( { - "content-type": "text/plain", - "status": "408", - "content-length": len(content) - }) + response = Response({ + "content-type": "text/plain", + "status": "408", + "content-length": len(content) + }) response.reason = "Request Timeout" else: - content = str(e) - response = Response( { - "content-type": "text/plain", - "status": "400", - "content-length": len(content) - }) - response.reason = "Bad Request" + content = str(e) + response = Response({ + "content-type": "text/plain", + "status": "400", + "content-length": len(content) + }) + response.reason = "Bad Request" else: raise - + return (response, content) - + def _get_proxy_info(self, scheme, authority): + """Return a ProxyInfo instance (or None) based on the scheme + and authority. + """ + hostname, port = urllib.splitport(authority) + proxy_info = self.proxy_info + if callable(proxy_info): + proxy_info = proxy_info(scheme) + + if (hasattr(proxy_info, 'applies_to') + and not proxy_info.applies_to(hostname)): + proxy_info = None + return proxy_info + class Response(dict): """An object more like email.Message than httplib.HTTPResponse.""" - + """Is this response from our local cache""" fromcache = False @@ -1208,27 +1669,28 @@ class Response(dict): previous = None def __init__(self, info): - # info is either an email.Message or + # info is either an email.Message or # an httplib.HTTPResponse object. if isinstance(info, httplib.HTTPResponse): - for key, value in info.getheaders(): - self[key.lower()] = value + for key, value in info.getheaders(): + self[key.lower()] = value self.status = info.status self['status'] = str(self.status) self.reason = info.reason self.version = info.version elif isinstance(info, email.Message.Message): - for key, value in info.items(): - self[key] = value + for key, value in info.items(): + self[key.lower()] = value self.status = int(self['status']) else: - for key, value in info.iteritems(): - self[key] = value + for key, value in info.iteritems(): + self[key.lower()] = value self.status = int(self.get('status', self.status)) + self.reason = self.get('reason', self.reason) def __getattr__(self, name): if name == 'dict': - return self - else: - raise AttributeError, name + return self + else: + raise AttributeError, name diff --git a/lib/httplib2/cacerts.txt b/lib/httplib2/cacerts.txt new file mode 100644 index 0000000000..70990f1f82 --- /dev/null +++ b/lib/httplib2/cacerts.txt @@ -0,0 +1,2183 @@ +# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. +# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. 
+# Label: "GTE CyberTrust Global Root" +# Serial: 421 +# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db +# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74 +# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36 +-----BEGIN CERTIFICATE----- +MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD +VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv +bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv +b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV +UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU +cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds +b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH +iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS +r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4 +04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r +GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9 +3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P +lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Server CA" +# Serial: 1 +# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d +# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c +# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9 +-----BEGIN CERTIFICATE----- +MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm +MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx +MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT +DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 +dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl +cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 +DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD +gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 +yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX +L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj +EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG +7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e +QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ +qdq5snUb9kLy78fyGPmJvKP/iiMucEc= +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Premium Server CA" +# Serial: 1 +# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a +# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a +# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72 +-----BEGIN CERTIFICATE----- +MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD 
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy +dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t +MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB +MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG +A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp +b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl +cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv +bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE +VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ +ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR +uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG +9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI +hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM +pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== +-----END CERTIFICATE----- + +# Issuer: O=Equifax OU=Equifax Secure Certificate Authority +# Subject: O=Equifax OU=Equifax Secure Certificate Authority +# Label: "Equifax Secure CA" +# Serial: 903804111 +# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4 +# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a +# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78 +-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV +UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy +dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 +MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx +dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f +BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A +cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ +MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm +aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw +ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj +IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA +A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y +7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh +1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Subject: O=VeriSign, Inc. 
OU=Class 3 Public Primary Certification Authority +# Label: "Verisign Class 3 Public Primary Certification Authority" +# Serial: 149843929435818692848040365716851702463 +# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67 +# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2 +# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70 +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do +lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc +AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network +# Label: "Verisign Class 3 Public Primary Certification Authority - G2" +# Serial: 167285380242319648451154478808036881606 +# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9 +# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f +# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X +DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 +pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 +13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk +U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i +F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY +oJ2daZH9 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: 
eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 1 Policy Validation Authority +# Label: "ValiCert Class 1 VA" +# Serial: 1 +# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb +# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e +# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04 +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy +NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y +LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+ +TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y +TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0 +LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW +I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw +nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority +# Label: "ValiCert Class 2 VA" +# Serial: 1 +# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87 +# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6 +# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy +NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY +dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 +WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS +v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v +UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu +IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC +W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 3 Policy Validation Authority +# Label: "RSA Root Certificate 1" +# Serial: 1 +# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72 +# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb +# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy +NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD +cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs +2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY +JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE +Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ +n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A +PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Label: "Verisign Class 3 Public Primary Certification Authority - G3" +# Serial: 206684696279472310254277870180966723415 +# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 +# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 +# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b +N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t +KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu +kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm +CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ +Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu +imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te +2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe +DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p 
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt +TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Label: "Verisign Class 4 Public Primary Certification Authority - G3" +# Serial: 314531972711909413743075096039378935511 +# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df +# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d +# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 +GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ ++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd +U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm +NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY +ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ +ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 +CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq +g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm +fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c +2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ +bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Secure Server CA" +# Serial: 927650371 +# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee +# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39 +# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50 +-----BEGIN CERTIFICATE----- +MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC +VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u +ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc +KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u +ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 +MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE +ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j +b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF +bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg +U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA +A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ +I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 +wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC +AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb +oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 +BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p +dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk +MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp +b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu +dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 +MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi +E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa +MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI +hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN +95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd +2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946059622 +# MD5 Fingerprint: ba:21:ea:20:d6:dd:db:8f:c1:57:8b:40:ad:a1:fc:fc +# SHA1 Fingerprint: 80:1d:62:d0:7b:44:9d:5c:5c:03:5c:98:ea:61:fa:44:3c:2a:58:fe +# SHA256 Fingerprint: d1:c3:39:ea:27:84:eb:87:0f:93:4f:c5:63:4e:4a:a9:ad:55:05:01:64:01:f2:64:65:d3:7a:57:46:63:35:9f +-----BEGIN CERTIFICATE----- +MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0xOTEy +MjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo3QwcjARBglghkgBhvhCAQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGA +vtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdERgL7YibkIozH5oSQJFrlwMB0G +CSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEA +WUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo +oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQ +h7A6tcOdBTcSo8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18 +f3v/rxzP5tsHrV7bhZ3QKw0z2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfN +B/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjXOP/swNlQ8C5LWK5Gb9Auw2DaclVy +vUxFnmG6v4SBkgPR0ml8xQ== +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. +# Label: "Equifax Secure Global eBusiness CA" +# Serial: 1 +# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc +# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45 +# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07 +-----BEGIN CERTIFICATE----- +MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT +ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw +MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj +dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l +c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC +UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc +58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ +o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr +aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA +A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA +Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv +8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. +# Label: "Equifax Secure eBusiness CA 1" +# Serial: 4 +# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d +# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41 +# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73 +-----BEGIN CERTIFICATE----- +MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT +ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw +MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j +LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ +KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo +RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu +WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw +Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK +eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM +zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ +WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN +/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== +-----END CERTIFICATE----- + +# Issuer: O=Equifax Secure OU=Equifax Secure eBusiness CA-2 +# Subject: O=Equifax Secure OU=Equifax Secure eBusiness CA-2 +# Label: "Equifax Secure eBusiness CA 2" +# Serial: 930140085 +# MD5 Fingerprint: aa:bf:bf:64:97:da:98:1d:6f:c6:08:3a:95:70:33:ca +# SHA1 Fingerprint: 39:4f:f6:85:0b:06:be:52:e5:18:56:cc:10:e1:80:e8:82:b3:85:cc +# SHA256 Fingerprint: 2f:27:4e:48:ab:a4:ac:7b:76:59:33:10:17:75:50:6d:c3:0e:e3:8e:f6:ac:d5:c0:49:32:cf:e0:41:23:42:20 +-----BEGIN CERTIFICATE----- 
+MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV +UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj +dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0 +NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD +VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G +vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/ +BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX +MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl +IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw +NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq +y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA +A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy +0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1 +E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Low-Value Services Root" +# Serial: 1 +# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc +# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d +# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7 +-----BEGIN CERTIFICATE----- +MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw +MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML +QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD +VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA +A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul +CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n +tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl +dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch +PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC ++Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O +BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl +MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk +ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB +IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X +7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz +43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY +eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl +pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA +WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Label: "AddTrust External Root" +# Serial: 1 +# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f +# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 +# SHA256 Fingerprint: 
68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 +-----BEGIN CERTIFICATE----- +MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs +IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 +MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux +FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h +bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v +dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt +H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 +uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX +mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX +a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN +E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 +WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD +VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 +Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU +cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx +IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN +AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH +YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 +6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC +Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX +c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a +mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Public Services Root" +# Serial: 1 +# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f +# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5 +# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx +MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB +ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV +BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV +6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX +GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP +dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH +1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF +62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW +BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL +MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU +cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv +b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6 +IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/ +iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao +GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh +4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm 
+XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Qualified Certificates Root" +# Serial: 1 +# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb +# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf +# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16 +-----BEGIN CERTIFICATE----- +MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1 +MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK +EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh +BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq +xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G +87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i +2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U +WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1 +0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G +A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr +pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL +ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm +aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv +hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm +hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X +dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3 +P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y +iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no +xqE= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
+# Label: "GeoTrust Global CA" +# Serial: 144470 +# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 +# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 +# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a +-----BEGIN CERTIFICATE----- +MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i +YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg +R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 +9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq +fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv +iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU +1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ +bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW +MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA +ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l +uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn +Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS +tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF +PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un +hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV +5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc. +# Label: "GeoTrust Global CA 2" +# Serial: 1 +# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9 +# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d +# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85 +-----BEGIN CERTIFICATE----- +MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs +IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg +R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A +PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8 +Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL +TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL +5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7 +S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe +2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE +FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap +EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td +EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv +/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN +A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0 +abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF +I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz +4iIprn2DQKi6bA== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. 
+# Label: "GeoTrust Universal CA" +# Serial: 1 +# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 +# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 +# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 +-----BEGIN CERTIFICATE----- +MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy +c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE +BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 +IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV +VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 +cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT +QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh +F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v +c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w +mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd +VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX +teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ +f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe +Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ +nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY +MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG +9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc +aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX +IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn +ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z +uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN +Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja +QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW +koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 +ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt +DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm +bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. 
+# Label: "GeoTrust Universal CA 2" +# Serial: 1 +# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 +# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 +# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy +c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD +VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 +c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 +WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG +FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq +XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL +se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb +KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd +IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 +y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt +hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc +QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 +Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV +HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ +KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z +dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ +L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr +Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo +ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY +T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz +GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m +1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV +OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH +6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX +QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS +-----END CERTIFICATE----- + +# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc. +# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc. 
+# Label: "America Online Root Certification Authority 1" +# Serial: 1 +# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e +# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a +# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3 +-----BEGIN CERTIFICATE----- +MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP +bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2 +MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft +ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg +Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk +hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym +1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW +OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb +2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko +O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU +AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF +Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb +LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir +oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C +MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds +sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7 +-----END CERTIFICATE----- + +# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc. +# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc. 
+# Label: "America Online Root Certification Authority 2" +# Serial: 1 +# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf +# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84 +# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd +-----BEGIN CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP +bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2 +MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft +ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg +Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC +206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci +KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2 +JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9 +BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e +Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B +PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67 +Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq +Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ +o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3 ++L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj +FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn +xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2 +LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc +obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8 +CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe +IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA +DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F +AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX +Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb +AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl +Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw +RY8mkaKO/qk= +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=Secure Certificate Services O=Comodo CA Limited +# Subject: CN=Secure Certificate Services O=Comodo CA Limited +# Label: "Comodo Secure Services root" +# Serial: 1 +# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd +# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1 +# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8 +-----BEGIN CERTIFICATE----- +MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp +ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow +fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV +BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM +cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S +HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996 +CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk +3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz +6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV +HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud +EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv +Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw +Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww +DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0 +5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj +Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI +gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ +aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl +izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk= +-----END CERTIFICATE----- + +# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited +# Subject: CN=Trusted Certificate Services O=Comodo CA Limited +# Label: "Comodo Trusted Services root" +# Serial: 1 +# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27 +# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd +# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 
+aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla +MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO +BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD +VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW +fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt +TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL +fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW +1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 +kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G +A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v +ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo +dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu +Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ +HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 +pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS +jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ +xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn +dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi +-----END CERTIFICATE----- + +# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com +# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com +# Label: "UTN DATACorp SGC Root CA" +# Serial: 91374294542884689855167577680241077609 +# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06 +# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4 +# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48 +-----BEGIN CERTIFICATE----- +MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB +kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw +IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG +EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD +VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu +dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 +E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ +D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK +4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq +lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW +bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB +o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT +MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js +LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr +BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB +AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft +Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj +j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH +KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv +2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 +mfnGV/TJVTl4uix5yaaIK/QI +-----END CERTIFICATE----- + +# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com 
+# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com +# Label: "UTN USERFirst Hardware Root CA" +# Serial: 91374294542884704022267039221184531197 +# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39 +# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7 +# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37 +-----BEGIN CERTIFICATE----- +MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB +lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt +SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG +A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe +MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v +d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh +cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn +0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ +M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a +MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd +oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI +DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy +oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD +VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0 +dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy +bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF +BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM +//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli +CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE +CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t +3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS +KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi 
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing +# Label: "StartCom Certification Authority" +# Serial: 1 +# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16 +# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f +# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea +-----BEGIN CERTIFICATE----- +MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg +Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9 +MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi +U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh +cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk +pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf +OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C +Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT +Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi +HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM +Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w ++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ +Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 +Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B +26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID +AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE +FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j +ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js +LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM +BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0 +Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy +dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh +cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh +YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg +dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp +bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ +YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT +TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ +9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8 +jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW +FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz +ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1 +ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L +EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu +L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq +yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC +O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V +um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh +NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 
05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA 
O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. 
+# Label: "GeoTrust Primary Certification Authority" +# Serial: 32798226551256963324313806436981982369 +# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf +# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 +# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY +MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo +R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx +MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK +Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 +AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA +ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 +7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W +kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI +mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ +KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 +6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl +4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K +oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj +UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU +AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only +# Label: "thawte Primary Root CA" +# Serial: 69529181992039203566298953787712940909 +# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 +# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 +# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB +qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV +BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw +NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j +LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG +A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs +W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta +3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk +6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 +Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J +NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP +r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU +DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz +YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 +/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ +LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 +jVaMaA== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" +# Serial: 33037644167568058970164719475676101450 +# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c +# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 +# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 +nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex +t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz +SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG +BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ +rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ +NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E +BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH +BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv +MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE +p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y +5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK +WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ +4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N +hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy 
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA +# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA +# Label: "TC TrustCenter Class 2 CA II" +# Serial: 941389028203453866782103406992443 +# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23 +# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e +# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4 +-----BEGIN CERTIFICATE----- +MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV +BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 +Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1 +OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i +SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc +VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf +tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg +uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J +XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK +8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99 +5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3 +kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy +dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6 +Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz +JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 +Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u +TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS +GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt +ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8 +au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV +hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI +dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ== +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA +# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA +# Label: "TC TrustCenter Class 3 CA II" +# Serial: 1506523511417715638772220530020799 +# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e +# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5 +# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e +-----BEGIN CERTIFICATE----- +MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV 
+BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 +Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1 +OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i +SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc +VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW +Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q +Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2 +1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq +ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1 +Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX +XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy +dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6 +Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz +JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 +Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u +TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN +irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8 +TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6 +g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB +95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj +S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A== +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Label: "TC TrustCenter Universal CA I" +# Serial: 601024842042189035295619584734726 +# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c +# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3 +# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV +BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1 +c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx +MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg +R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD +VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR +JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T +fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu +jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z +wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ +fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD +VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G +CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1 +7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn +8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs +ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT +ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/ +2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global 
Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE +ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G3" +# Serial: 28809105769928564313984085209975885599 +# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 +# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd +# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 +-----BEGIN CERTIFICATE----- +MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB +mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT +MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv +cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ +BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg +MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 +BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz ++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm +hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn +5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W +JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL +DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC +huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw +HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB +AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB +zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN +kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD +AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH +SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G +spki4cErx5z481+oghLrGREt +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G2" +# Serial: 71758320672825410020661621085256472406 +# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f +# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 +# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 +-----BEGIN CERTIFICATE----- +MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp +IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi +BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw +MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh +d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig +YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v +dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ +BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 +papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K +DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 +KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox +XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only +# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G3" +# Serial: 127614157056681299805556476275995414779 +# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 +# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 +# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB +rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV +BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa +Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl +LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u +MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl +ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm +gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 +YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf +b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 +9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S +zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk +OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV +HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA +2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW +oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu +t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c +KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM +m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu +MdRAGmI0Nj81Aa6sY6A= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G2" +# Serial: 80682863203381065782177908751794619243 +# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a +# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 +# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL +MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj +KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 +MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw +NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV +BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL +So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal +tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG +CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT +qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz +rD6ogRLQy7rQkgu2npaqBA+K +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Universal Root Certification Authority" +# Serial: 85209574734084581917763752644031726877 +# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 +# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 +# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB +vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W +ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 +IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y +IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh +bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF +9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH +H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H +LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN +/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT +rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw +WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs +exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 +sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz +4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ +BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR +lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 +7M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" +# Serial: 63143484348153506665311985501458640051 +# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 +# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a +# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 +-----BEGIN CERTIFICATE----- +MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp +U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg +SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln +biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm +GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve +fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ +aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj +aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW +kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC +4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga +FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Subject: O=VeriSign, Inc. 
OU=Class 3 Public Primary Certification Authority +# Label: "Verisign Class 3 Public Primary Certification Authority" +# Serial: 80507572722862485515306429940691309246 +# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4 +# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b +# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05 +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i +2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ +2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Subject: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Label: "TC TrustCenter Universal CA III" +# Serial: 2010889993983507346460533407902964 +# MD5 Fingerprint: 9f:dd:db:ab:ff:8e:ff:45:21:5f:f0:6c:9d:8f:fe:2b +# SHA1 Fingerprint: 96:56:cd:7b:57:96:98:95:d0:e1:41:46:68:06:fb:b8:c6:11:06:87 +# SHA256 Fingerprint: 
30:9b:4a:87:f6:ca:56:c9:31:69:aa:a9:9c:6d:98:88:54:d7:89:2b:d5:43:7e:2d:07:b2:9c:be:da:55:d3:5d +-----BEGIN CERTIFICATE----- +MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV +BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1 +c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy +MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl +ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm +BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF +5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv +DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v +zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT +yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj +dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh +MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI +4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz +dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY +aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G +DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV +CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH +LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Label: "StartCom Certification Authority" +# Serial: 45 +# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16 +# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0 +# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11 +-----BEGIN CERTIFICATE----- +MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg +Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9 +MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi +U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh +cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk +pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf +OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C +Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT +Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi +HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM +Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w ++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ +Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 +Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B +26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID +AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul +F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC 
+ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w +ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk +aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0 +YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg +c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93 +d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG +CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1 +dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF +wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS +Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst +0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc +pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl +CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF +P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK +1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm +KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE +JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ +8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm +fyWl8kgAwKQB2j8= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd. +# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd. +# Label: "StartCom Certification Authority G2" +# Serial: 59 +# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64 +# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17 +# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95 +-----BEGIN CERTIFICATE----- +MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1 +OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG +A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ +JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD +vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo +D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/ +Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW +RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK +HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN +nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM +0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i +UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9 +Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg +TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL +BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K +2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX +UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl +6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK +9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ +HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI +wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY +XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l 
+IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo +hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr +so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI +-----END CERTIFICATE----- diff --git a/lib/httplib2/iri2uri.py b/lib/httplib2/iri2uri.py index 70667edf85..d88c91fdfb 100644 --- a/lib/httplib2/iri2uri.py +++ b/lib/httplib2/iri2uri.py @@ -16,7 +16,7 @@ # Convert an IRI to a URI following the rules in RFC 3987 -# +# # The characters we need to enocde and escape are defined in the spec: # # iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD @@ -28,28 +28,28 @@ # / %xD0000-DFFFD / %xE1000-EFFFD escape_range = [ - (0xA0, 0xD7FF ), - (0xE000, 0xF8FF ), - (0xF900, 0xFDCF ), - (0xFDF0, 0xFFEF), - (0x10000, 0x1FFFD ), - (0x20000, 0x2FFFD ), - (0x30000, 0x3FFFD), - (0x40000, 0x4FFFD ), - (0x50000, 0x5FFFD ), - (0x60000, 0x6FFFD), - (0x70000, 0x7FFFD ), - (0x80000, 0x8FFFD ), - (0x90000, 0x9FFFD), - (0xA0000, 0xAFFFD ), - (0xB0000, 0xBFFFD ), - (0xC0000, 0xCFFFD), - (0xD0000, 0xDFFFD ), - (0xE1000, 0xEFFFD), - (0xF0000, 0xFFFFD ), - (0x100000, 0x10FFFD) + (0xA0, 0xD7FF), + (0xE000, 0xF8FF), + (0xF900, 0xFDCF), + (0xFDF0, 0xFFEF), + (0x10000, 0x1FFFD), + (0x20000, 0x2FFFD), + (0x30000, 0x3FFFD), + (0x40000, 0x4FFFD), + (0x50000, 0x5FFFD), + (0x60000, 0x6FFFD), + (0x70000, 0x7FFFD), + (0x80000, 0x8FFFD), + (0x90000, 0x9FFFD), + (0xA0000, 0xAFFFD), + (0xB0000, 0xBFFFD), + (0xC0000, 0xCFFFD), + (0xD0000, 0xDFFFD), + (0xE1000, 0xEFFFD), + (0xF0000, 0xFFFFD), + (0x100000, 0x10FFFD), ] - + def encode(c): retval = c i = ord(c) @@ -63,19 +63,19 @@ def encode(c): def iri2uri(uri): - """Convert an IRI to a URI. Note that IRIs must be + """Convert an IRI to a URI. Note that IRIs must be passed in a unicode strings. That is, do not utf-8 encode - the IRI before passing it into the function.""" + the IRI before passing it into the function.""" if isinstance(uri ,unicode): (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri) authority = authority.encode('idna') # For each character in 'ucschar' or 'iprivate' # 1. encode as utf-8 - # 2. then %-encode each octet of that utf-8 + # 2. then %-encode each octet of that utf-8 uri = urlparse.urlunsplit((scheme, authority, path, query, fragment)) uri = "".join([encode(c) for c in uri]) return uri - + if __name__ == "__main__": import unittest @@ -83,7 +83,7 @@ class Test(unittest.TestCase): def test_uris(self): """Test that URIs are invariant under the transformation.""" - invariant = [ + invariant = [ u"ftp://ftp.is.co.za/rfc/rfc1808.txt", u"http://www.ietf.org/rfc/rfc2396.txt", u"ldap://[2001:db8::7]/c=GB?objectClass?one", @@ -94,7 +94,7 @@ def test_uris(self): u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ] for uri in invariant: self.assertEqual(uri, iri2uri(uri)) - + def test_iri(self): """ Test that the right type of escaping is done for each part of the URI.""" self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}")) @@ -107,4 +107,4 @@ def test_iri(self): unittest.main() - + diff --git a/lib/httplib2/socks.py b/lib/httplib2/socks.py new file mode 100644 index 0000000000..0991f4cf6e --- /dev/null +++ b/lib/httplib2/socks.py @@ -0,0 +1,438 @@ +"""SocksiPy - Python SOCKS module. +Version 1.00 + +Copyright 2006 Dan-Haim. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: +1. 
Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of Dan Haim nor the names of his contributors may be used + to endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. + + +This module provides a standard socket-like interface for Python +for tunneling connections through SOCKS proxies. + +""" + +""" + +Minor modifications made by Christopher Gilbert (http://motomastyle.com/) +for use in PyLoris (http://pyloris.sourceforge.net/) + +Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/) +mainly to merge bug fixes found in Sourceforge + +""" + +import base64 +import socket +import struct +import sys + +if getattr(socket, 'socket', None) is None: + raise ImportError('socket.socket missing, proxy support unusable') + +PROXY_TYPE_SOCKS4 = 1 +PROXY_TYPE_SOCKS5 = 2 +PROXY_TYPE_HTTP = 3 +PROXY_TYPE_HTTP_NO_TUNNEL = 4 + +_defaultproxy = None +_orgsocket = socket.socket + +class ProxyError(Exception): pass +class GeneralProxyError(ProxyError): pass +class Socks5AuthError(ProxyError): pass +class Socks5Error(ProxyError): pass +class Socks4Error(ProxyError): pass +class HTTPError(ProxyError): pass + +_generalerrors = ("success", + "invalid data", + "not connected", + "not available", + "bad proxy type", + "bad input") + +_socks5errors = ("succeeded", + "general SOCKS server failure", + "connection not allowed by ruleset", + "Network unreachable", + "Host unreachable", + "Connection refused", + "TTL expired", + "Command not supported", + "Address type not supported", + "Unknown error") + +_socks5autherrors = ("succeeded", + "authentication is required", + "all offered authentication methods were rejected", + "unknown username or invalid password", + "unknown error") + +_socks4errors = ("request granted", + "request rejected or failed", + "request rejected because SOCKS server cannot connect to identd on the client", + "request rejected because the client program and identd report different user-ids", + "unknown error") + +def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): + """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) + Sets a default proxy which all further socksocket objects will use, + unless explicitly changed. + """ + global _defaultproxy + _defaultproxy = (proxytype, addr, port, rdns, username, password) + +def wrapmodule(module): + """wrapmodule(module) + Attempts to replace a module's socket library with a SOCKS socket. 
Must set + a default proxy using setdefaultproxy(...) first. + This will only work on modules that import socket directly into the namespace; + most of the Python Standard Library falls into this category. + """ + if _defaultproxy != None: + module.socket.socket = socksocket + else: + raise GeneralProxyError((4, "no proxy specified")) + +class socksocket(socket.socket): + """socksocket([family[, type[, proto]]]) -> socket object + Open a SOCKS enabled socket. The parameters are the same as + those of the standard socket init. In order for SOCKS to work, + you must specify family=AF_INET, type=SOCK_STREAM and proto=0. + """ + + def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None): + _orgsocket.__init__(self, family, type, proto, _sock) + if _defaultproxy != None: + self.__proxy = _defaultproxy + else: + self.__proxy = (None, None, None, None, None, None) + self.__proxysockname = None + self.__proxypeername = None + self.__httptunnel = True + + def __recvall(self, count): + """__recvall(count) -> data + Receive EXACTLY the number of bytes requested from the socket. + Blocks until the required number of bytes have been received. + """ + data = self.recv(count) + while len(data) < count: + d = self.recv(count-len(data)) + if not d: raise GeneralProxyError((0, "connection closed unexpectedly")) + data = data + d + return data + + def sendall(self, content, *args): + """ override socket.socket.sendall method to rewrite the header + for non-tunneling proxies if needed + """ + if not self.__httptunnel: + content = self.__rewriteproxy(content) + return super(socksocket, self).sendall(content, *args) + + def __rewriteproxy(self, header): + """ rewrite HTTP request headers to support non-tunneling proxies + (i.e. those which do not support the CONNECT method). + This only works for HTTP (not HTTPS) since HTTPS requires tunneling. + """ + host, endpt = None, None + hdrs = header.split("\r\n") + for hdr in hdrs: + if hdr.lower().startswith("host:"): + host = hdr + elif hdr.lower().startswith("get") or hdr.lower().startswith("post"): + endpt = hdr + if host and endpt: + hdrs.remove(host) + hdrs.remove(endpt) + host = host.split(" ")[1] + endpt = endpt.split(" ") + if (self.__proxy[4] != None and self.__proxy[5] != None): + hdrs.insert(0, self.__getauthheader()) + hdrs.insert(0, "Host: %s" % host) + hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2])) + return "\r\n".join(hdrs) + + def __getauthheader(self): + auth = self.__proxy[4] + ":" + self.__proxy[5] + return "Proxy-Authorization: Basic " + base64.b64encode(auth) + + def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): + """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) + Sets the proxy to be used. + proxytype - The type of the proxy to be used. Three types + are supported: PROXY_TYPE_SOCKS4 (including socks4a), + PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP + addr - The address of the server (IP or DNS). + port - The port of the server. Defaults to 1080 for SOCKS + servers and 8080 for HTTP proxy servers. + rdns - Should DNS queries be preformed on the remote side + (rather than the local side). The default is True. + Note: This has no effect with SOCKS4 servers. + username - Username to authenticate with to the server. + The default is no authentication. + password - Password to authenticate with to the server. + Only relevant when username is also provided. 
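+
+        Illustrative sketch (not part of the original SocksiPy docstring;
+        the proxy address, port and credentials below are placeholders):
+
+            s = socksocket()
+            s.setproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 1080,
+                       username="user", password="secret")
+            s.connect(("www.example.com", 80))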
+ """ + self.__proxy = (proxytype, addr, port, rdns, username, password) + + def __negotiatesocks5(self, destaddr, destport): + """__negotiatesocks5(self,destaddr,destport) + Negotiates a connection through a SOCKS5 server. + """ + # First we'll send the authentication packages we support. + if (self.__proxy[4]!=None) and (self.__proxy[5]!=None): + # The username/password details were supplied to the + # setproxy method so we support the USERNAME/PASSWORD + # authentication (in addition to the standard none). + self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02)) + else: + # No username/password were entered, therefore we + # only support connections with no authentication. + self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00)) + # We'll receive the server's response to determine which + # method was selected + chosenauth = self.__recvall(2) + if chosenauth[0:1] != chr(0x05).encode(): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + # Check the chosen authentication method + if chosenauth[1:2] == chr(0x00).encode(): + # No authentication is required + pass + elif chosenauth[1:2] == chr(0x02).encode(): + # Okay, we need to perform a basic username/password + # authentication. + self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5]) + authstat = self.__recvall(2) + if authstat[0:1] != chr(0x01).encode(): + # Bad response + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + if authstat[1:2] != chr(0x00).encode(): + # Authentication failed + self.close() + raise Socks5AuthError((3, _socks5autherrors[3])) + # Authentication succeeded + else: + # Reaching here is always bad + self.close() + if chosenauth[1] == chr(0xFF).encode(): + raise Socks5AuthError((2, _socks5autherrors[2])) + else: + raise GeneralProxyError((1, _generalerrors[1])) + # Now we can request the actual connection + req = struct.pack('BBB', 0x05, 0x01, 0x00) + # If the given destination address is an IP address, we'll + # use the IPv4 address request even if remote resolving was specified. + try: + ipaddr = socket.inet_aton(destaddr) + req = req + chr(0x01).encode() + ipaddr + except socket.error: + # Well it's not an IP number, so it's probably a DNS name. 
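+            # (SOCKS5 address types per RFC 1928: 0x01 = IPv4, 0x03 = domain
+            #  name, 0x04 = IPv6; this client only ever sends 0x01 or 0x03.)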
+ if self.__proxy[3]: + # Resolve remotely + ipaddr = None + req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr + else: + # Resolve locally + ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) + req = req + chr(0x01).encode() + ipaddr + req = req + struct.pack(">H", destport) + self.sendall(req) + # Get the response + resp = self.__recvall(4) + if resp[0:1] != chr(0x05).encode(): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + elif resp[1:2] != chr(0x00).encode(): + # Connection failed + self.close() + if ord(resp[1:2])<=8: + raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])])) + else: + raise Socks5Error((9, _socks5errors[9])) + # Get the bound address/port + elif resp[3:4] == chr(0x01).encode(): + boundaddr = self.__recvall(4) + elif resp[3:4] == chr(0x03).encode(): + resp = resp + self.recv(1) + boundaddr = self.__recvall(ord(resp[4:5])) + else: + self.close() + raise GeneralProxyError((1,_generalerrors[1])) + boundport = struct.unpack(">H", self.__recvall(2))[0] + self.__proxysockname = (boundaddr, boundport) + if ipaddr != None: + self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) + else: + self.__proxypeername = (destaddr, destport) + + def getproxysockname(self): + """getsockname() -> address info + Returns the bound IP address and port number at the proxy. + """ + return self.__proxysockname + + def getproxypeername(self): + """getproxypeername() -> address info + Returns the IP and port number of the proxy. + """ + return _orgsocket.getpeername(self) + + def getpeername(self): + """getpeername() -> address info + Returns the IP address and port number of the destination + machine (note: getproxypeername returns the proxy) + """ + return self.__proxypeername + + def __negotiatesocks4(self,destaddr,destport): + """__negotiatesocks4(self,destaddr,destport) + Negotiates a connection through a SOCKS4 server. + """ + # Check if the destination address provided is an IP address + rmtrslv = False + try: + ipaddr = socket.inet_aton(destaddr) + except socket.error: + # It's a DNS name. Check where it should be resolved. + if self.__proxy[3]: + ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01) + rmtrslv = True + else: + ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) + # Construct the request packet + req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr + # The username parameter is considered userid for SOCKS4 + if self.__proxy[4] != None: + req = req + self.__proxy[4] + req = req + chr(0x00).encode() + # DNS name if remote resolving is required + # NOTE: This is actually an extension to the SOCKS4 protocol + # called SOCKS4A and may not be supported in all cases. 
+ if rmtrslv: + req = req + destaddr + chr(0x00).encode() + self.sendall(req) + # Get the response from the server + resp = self.__recvall(8) + if resp[0:1] != chr(0x00).encode(): + # Bad data + self.close() + raise GeneralProxyError((1,_generalerrors[1])) + if resp[1:2] != chr(0x5A).encode(): + # Server returned an error + self.close() + if ord(resp[1:2]) in (91, 92, 93): + self.close() + raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90])) + else: + raise Socks4Error((94, _socks4errors[4])) + # Get the bound address/port + self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0]) + if rmtrslv != None: + self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) + else: + self.__proxypeername = (destaddr, destport) + + def __negotiatehttp(self, destaddr, destport): + """__negotiatehttp(self,destaddr,destport) + Negotiates a connection through an HTTP server. + """ + # If we need to resolve locally, we do this now + if not self.__proxy[3]: + addr = socket.gethostbyname(destaddr) + else: + addr = destaddr + headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"] + headers += ["Host: ", destaddr, "\r\n"] + if (self.__proxy[4] != None and self.__proxy[5] != None): + headers += [self.__getauthheader(), "\r\n"] + headers.append("\r\n") + self.sendall("".join(headers).encode()) + # We read the response until we get the string "\r\n\r\n" + resp = self.recv(1) + while resp.find("\r\n\r\n".encode()) == -1: + resp = resp + self.recv(1) + # We just need the first line to check if the connection + # was successful + statusline = resp.splitlines()[0].split(" ".encode(), 2) + if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + try: + statuscode = int(statusline[1]) + except ValueError: + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + if statuscode != 200: + self.close() + raise HTTPError((statuscode, statusline[2])) + self.__proxysockname = ("0.0.0.0", 0) + self.__proxypeername = (addr, destport) + + def connect(self, destpair): + """connect(self, despair) + Connects to the specified destination through a proxy. + destpar - A tuple of the IP/DNS address and the port number. + (identical to socket's connect). + To select the proxy server use setproxy(). 
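+
+        Illustrative example (placeholder host and port, not taken from the
+        original documentation):
+
+            s.connect(("www.example.com", 80))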
+ """ + # Do a minimal input check first + if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int): + raise GeneralProxyError((5, _generalerrors[5])) + if self.__proxy[0] == PROXY_TYPE_SOCKS5: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 1080 + _orgsocket.connect(self, (self.__proxy[1], portnum)) + self.__negotiatesocks5(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_SOCKS4: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 1080 + _orgsocket.connect(self,(self.__proxy[1], portnum)) + self.__negotiatesocks4(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_HTTP: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 8080 + _orgsocket.connect(self,(self.__proxy[1], portnum)) + self.__negotiatehttp(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 8080 + _orgsocket.connect(self,(self.__proxy[1],portnum)) + if destpair[1] == 443: + self.__negotiatehttp(destpair[0],destpair[1]) + else: + self.__httptunnel = False + elif self.__proxy[0] == None: + _orgsocket.connect(self, (destpair[0], destpair[1])) + else: + raise GeneralProxyError((4, _generalerrors[4])) diff --git a/lib/jsonrpclib/SimpleJSONRPCServer.py b/lib/jsonrpclib/SimpleJSONRPCServer.py index d76da73e3f..3a0a3bba5a 100644 --- a/lib/jsonrpclib/SimpleJSONRPCServer.py +++ b/lib/jsonrpclib/SimpleJSONRPCServer.py @@ -15,6 +15,7 @@ # For Windows fcntl = None + def get_version(request): # must be a dict if 'jsonrpc' in request.keys(): @@ -22,9 +23,10 @@ def get_version(request): if 'id' in request.keys(): return 1.0 return None - + + def validate_request(request): - if type(request) is not types.DictType: + if not isinstance(request, dict): fault = Fault( -32600, 'Request must be {}, not %s.' % type(request) ) @@ -33,27 +35,27 @@ def validate_request(request): version = get_version(request) if not version: fault = Fault(-32600, 'Request %s invalid.' 
% request, rpcid=rpcid) - return fault + return fault request.setdefault('params', []) method = request.get('method', None) params = request.get('params') param_types = (types.ListType, types.DictType, types.TupleType) if not method or type(method) not in types.StringTypes or \ - type(params) not in param_types: + type(params) not in param_types: fault = Fault( -32600, 'Invalid request parameters or method.', rpcid=rpcid ) return fault return True + class SimpleJSONRPCDispatcher(SimpleXMLRPCServer.SimpleXMLRPCDispatcher): def __init__(self, encoding=None): - SimpleXMLRPCServer.SimpleXMLRPCDispatcher.__init__(self, - allow_none=True, - encoding=encoding) + SimpleXMLRPCServer.SimpleXMLRPCDispatcher.__init__( + self, allow_none=True, encoding=encoding) - def _marshaled_dispatch(self, data, dispatch_method = None): + def _marshaled_dispatch(self, data, dispatch_method=None): response = None try: request = jsonrpclib.loads(data) @@ -64,7 +66,7 @@ def _marshaled_dispatch(self, data, dispatch_method = None): if not request: fault = Fault(-32600, 'Request invalid -- no request data.') return fault.response() - if type(request) is types.ListType: + if isinstance(request, list): # This SHOULD be a batch, by spec responses = [] for req_entry in request: @@ -79,7 +81,7 @@ def _marshaled_dispatch(self, data, dispatch_method = None): response = '[%s]' % ','.join(responses) else: response = '' - else: + else: result = validate_request(request) if type(result) is Fault: return result.response() @@ -99,7 +101,7 @@ def _marshaled_single_dispatch(self, request): exc_type, exc_value, exc_tb = sys.exc_info() fault = Fault(-32603, '%s:%s' % (exc_type, exc_value)) return fault.response() - if 'id' not in request.keys() or request['id'] == None: + if 'id' not in request.keys() or request['id'] is None: # It's a notification return None try: @@ -132,25 +134,26 @@ def _dispatch(self, method, params): pass if func is not None: try: - if type(params) is types.ListType: + if isinstance(params, types.ListType): response = func(*params) else: response = func(**params) return response - except TypeError: - return Fault(-32602, 'Invalid parameters.') + # except TypeError: + # return Fault(-32602, 'Invalid parameters.') except: err_lines = traceback.format_exc().splitlines() trace_string = '%s | %s' % (err_lines[-3], err_lines[-1]) - fault = jsonrpclib.Fault(-32603, 'Server error: %s' % + fault = jsonrpclib.Fault(-32603, 'Server error: %s' % trace_string) return fault else: return Fault(-32601, 'Method %s not supported.' 
% method) + class SimpleJSONRPCRequestHandler( SimpleXMLRPCServer.SimpleXMLRPCRequestHandler): - + def do_POST(self): if not self.is_rpc_path_valid(): self.report_404() @@ -166,13 +169,13 @@ def do_POST(self): data = ''.join(L) response = self.server._marshaled_dispatch(data) self.send_response(200) - except Exception, e: + except Exception: self.send_response(500) err_lines = traceback.format_exc().splitlines() trace_string = '%s | %s' % (err_lines[-3], err_lines[-1]) fault = jsonrpclib.Fault(-32603, 'Server error: %s' % trace_string) response = fault.response() - if response == None: + if response is None: response = '' self.send_header("Content-type", "application/json-rpc") self.send_header("Content-length", str(len(response))) @@ -181,6 +184,7 @@ def do_POST(self): self.wfile.flush() self.connection.shutdown(1) + class SimpleJSONRPCServer(SocketServer.TCPServer, SimpleJSONRPCDispatcher): allow_reuse_address = True @@ -198,7 +202,7 @@ def __init__(self, addr, requestHandler=SimpleJSONRPCRequestHandler, # Unix sockets can't be bound if they already exist in the # filesystem. The convention of e.g. X11 is to unlink # before binding again. - if os.path.exists(addr): + if os.path.exists(addr): try: os.unlink(addr) except OSError: @@ -207,13 +211,14 @@ def __init__(self, addr, requestHandler=SimpleJSONRPCRequestHandler, if vi[0] < 3 and vi[1] < 6: SocketServer.TCPServer.__init__(self, addr, requestHandler) else: - SocketServer.TCPServer.__init__(self, addr, requestHandler, - bind_and_activate) + SocketServer.TCPServer.__init__( + self, addr, requestHandler, bind_and_activate) if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'): flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD) flags |= fcntl.FD_CLOEXEC fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags) + class CGIJSONRPCRequestHandler(SimpleJSONRPCDispatcher): def __init__(self, encoding=None): diff --git a/lib/jsonrpclib/__init__.py b/lib/jsonrpclib/__init__.py index 92c29b3977..6e884b83f6 100644 --- a/lib/jsonrpclib/__init__.py +++ b/lib/jsonrpclib/__init__.py @@ -1,7 +1,6 @@ -from config import Config +from jsonrpclib.config import Config config = Config.instance() -from history import History +from jsonrpclib.history import History history = History.instance() -import jsonrpc -from jsonrpc import Server, MultiCall, Fault -from jsonrpc import ProtocolError, loads, dumps +from jsonrpclib.jsonrpc import Server, MultiCall, Fault +from jsonrpclib.jsonrpc import ProtocolError, loads, dumps diff --git a/lib/jsonrpclib/config.py b/lib/jsonrpclib/config.py index 4d28f1b1fa..ca926ca0a7 100644 --- a/lib/jsonrpclib/config.py +++ b/lib/jsonrpclib/config.py @@ -1,12 +1,14 @@ import sys + class LocalClasses(dict): def add(self, cls): self[cls.__name__] = cls + class Config(object): """ - This is pretty much used exclusively for the 'jsonclass' + This is pretty much used exclusively for the 'jsonclass' functionality... set use_jsonclass to False to turn it off. You can change serialize_method and ignore_attribute, or use the local_classes.add(class) to include "local" classes. @@ -15,7 +17,7 @@ class Config(object): # Change to False to keep __jsonclass__ entries raw. serialize_method = '_serialize' # The serialize_method should be a string that references the - # method on a custom class object which is responsible for + # method on a custom class object which is responsible for # returning a tuple of the constructor arguments and a dict of # attributes. 
ignore_attribute = '_ignore' @@ -30,7 +32,7 @@ class Config(object): '.'.join([str(ver) for ver in sys.version_info[0:3]]) # User agent to use for calls. _instance = None - + @classmethod def instance(cls): if not cls._instance: diff --git a/lib/jsonrpclib/history.py b/lib/jsonrpclib/history.py index d11863dcda..f052baa34e 100644 --- a/lib/jsonrpclib/history.py +++ b/lib/jsonrpclib/history.py @@ -2,13 +2,13 @@ class History(object): """ This holds all the response and request objects for a session. A server using this should call "clear" after - each request cycle in order to keep it from clogging + each request cycle in order to keep it from clogging memory. """ requests = [] responses = [] _instance = None - + @classmethod def instance(cls): if not cls._instance: @@ -17,7 +17,7 @@ def instance(cls): def add_response(self, response_obj): self.responses.append(response_obj) - + def add_request(self, request_obj): self.requests.append(request_obj) diff --git a/lib/jsonrpclib/jsonclass.py b/lib/jsonrpclib/jsonclass.py index 298c3da3ee..4326f2808a 100644 --- a/lib/jsonrpclib/jsonclass.py +++ b/lib/jsonrpclib/jsonclass.py @@ -1,7 +1,6 @@ import types import inspect import re -import traceback from jsonrpclib import config @@ -30,9 +29,11 @@ supported_types = iter_types+string_types+numeric_types+value_types invalid_module_chars = r'[^a-zA-Z0-9\_\.]' + class TranslationError(Exception): pass + def dump(obj, serialize_method=None, ignore_attribute=None, ignore=[]): if not serialize_method: serialize_method = config.serialize_method @@ -46,17 +47,17 @@ def dump(obj, serialize_method=None, ignore_attribute=None, ignore=[]): if obj_type in (types.ListType, types.TupleType): new_obj = [] for item in obj: - new_obj.append(dump(item, serialize_method, - ignore_attribute, ignore)) - if obj_type is types.TupleType: + new_obj.append( + dump(item, serialize_method, ignore_attribute, ignore)) + if isinstance(obj_type, types.TupleType): new_obj = tuple(new_obj) return new_obj # It's a dict... else: new_obj = {} for key, value in obj.iteritems(): - new_obj[key] = dump(value, serialize_method, - ignore_attribute, ignore) + new_obj[key] = dump( + value, serialize_method, ignore_attribute, ignore) return new_obj # It's not a standard type, so it needs __jsonclass__ module_name = inspect.getmodule(obj).__name__ @@ -64,7 +65,7 @@ def dump(obj, serialize_method=None, ignore_attribute=None, ignore=[]): json_class = class_name if module_name not in ['', '__main__']: json_class = '%s.%s' % (module_name, json_class) - return_obj = {"__jsonclass__":[json_class,]} + return_obj = {"__jsonclass__": [json_class]} # If a serialization method is defined.. 
if serialize_method in dir(obj): # Params can be a dict (keyword) or list (positional) @@ -84,21 +85,23 @@ def dump(obj, serialize_method=None, ignore_attribute=None, ignore=[]): if type(attr_value) in supported_types and \ attr_name not in ignore_list and \ attr_value not in ignore_list: - attrs[attr_name] = dump(attr_value, serialize_method, - ignore_attribute, ignore) + attrs[attr_name] = dump( + attr_value, serialize_method, ignore_attribute, ignore) return_obj.update(attrs) return return_obj + def load(obj): - if type(obj) in string_types+numeric_types+value_types: + if type(obj) in string_types + numeric_types + value_types: return obj - if type(obj) is types.ListType: + + if isinstance(obj, list): return_list = [] for entry in obj: return_list.append(load(entry)) return return_list # Othewise, it's a dict type - if '__jsonclass__' not in obj.keys(): + if '__jsonclass__' not in obj: return_dict = {} for key, value in obj.iteritems(): new_value = load(value) @@ -129,12 +132,19 @@ def load(obj): except ImportError: raise TranslationError('Could not import %s from module %s.' % (json_class_name, json_module_tree)) + + # The returned class is the top-level module, not the one we really + # want. (E.g., if we import a.b.c, we now have a.) Walk through other + # path components to get to b and c. + for i in json_module_parts[1:]: + temp_module = getattr(temp_module, i) + json_class = getattr(temp_module, json_class_name) # Creating the object... new_obj = None - if type(params) is types.ListType: + if isinstance(params, list): new_obj = json_class(*params) - elif type(params) is types.DictType: + elif isinstance(params, dict): new_obj = json_class(**params) else: raise TranslationError('Constructor args must be a dict or list.') diff --git a/lib/jsonrpclib/jsonrpc.py b/lib/jsonrpclib/jsonrpc.py index e11939aea1..167bcd7f2d 100644 --- a/lib/jsonrpclib/jsonrpc.py +++ b/lib/jsonrpclib/jsonrpc.py @@ -1,15 +1,15 @@ """ -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. ============================ JSONRPC Library (jsonrpclib) @@ -29,7 +29,7 @@ and other things to tie the thing off nicely. :) For a quick-start, just open a console and type the following, -replacing the server address, method, and parameters +replacing the server address, method, and parameters appropriately. 
>>> import jsonrpclib >>> server = jsonrpclib.Server('http://localhost:8181') @@ -47,17 +47,14 @@ """ import types -import sys from xmlrpclib import Transport as XMLTransport from xmlrpclib import SafeTransport as XMLSafeTransport from xmlrpclib import ServerProxy as XMLServerProxy from xmlrpclib import _Method as XML_Method -import time import string import random # Library includes -import jsonrpclib from jsonrpclib import config from jsonrpclib import history @@ -80,14 +77,17 @@ IDCHARS = string.ascii_lowercase+string.digits + class UnixSocketMissing(Exception): - """ - Just a properly named Exception if Unix Sockets usage is + """ + Just a properly named Exception if Unix Sockets usage is attempted on a platform that doesn't support them (Windows) """ pass -#JSON Abstractions + +# JSON Abstractions + def jdumps(obj, encoding='utf-8'): # Do 'serialize' test at some point for other classes @@ -97,6 +97,7 @@ def jdumps(obj, encoding='utf-8'): else: return json.dumps(obj, encoding=encoding) + def jloads(json_string): global cjson if cjson: @@ -107,14 +108,17 @@ def jloads(json_string): # XMLRPClib re-implementations + class ProtocolError(Exception): pass + class TransportMixIn(object): """ Just extends the XMLRPC transport where necessary. """ user_agent = config.user_agent # for Python 2.7 support - _connection = None + _connection = (None, None) + _extra_headers = [] def send_content(self, connection, request_body): connection.putheader("Content-Type", "application/json-rpc") @@ -127,6 +131,7 @@ def getparser(self): target = JSONTarget() return JSONParser(target), target + class JSONParser(object): def __init__(self, target): self.target = target @@ -137,6 +142,7 @@ def feed(self, data): def close(self): pass + class JSONTarget(object): def __init__(self): self.data = [] @@ -147,24 +153,31 @@ def feed(self, data): def close(self): return ''.join(self.data) + class Transport(TransportMixIn, XMLTransport): - pass + def __init__(self): + TransportMixIn.__init__(self) + XMLTransport.__init__(self) + class SafeTransport(TransportMixIn, XMLSafeTransport): - pass + def __init__(self): + TransportMixIn.__init__(self) + XMLSafeTransport.__init__(self) + from httplib import HTTP, HTTPConnection from socket import socket USE_UNIX_SOCKETS = False -try: +try: from socket import AF_UNIX, SOCK_STREAM USE_UNIX_SOCKETS = True except ImportError: pass - + if (USE_UNIX_SOCKETS): - + class UnixHTTPConnection(HTTPConnection): def connect(self): self.sock = socket(AF_UNIX, SOCK_STREAM) @@ -174,19 +187,19 @@ class UnixHTTP(HTTP): _connection_class = UnixHTTPConnection class UnixTransport(TransportMixIn, XMLTransport): + def make_connection(self, host): - import httplib host, extra_headers, x509 = self.get_host_info(host) return UnixHTTP(host) - + class ServerProxy(XMLServerProxy): """ Unfortunately, much more of this class has to be copied since so much of it does the serialization. """ - def __init__(self, uri, transport=None, encoding=None, + def __init__(self, uri, transport=None, encoding=None, verbose=0, version=None): import urllib if not version: @@ -205,7 +218,7 @@ def __init__(self, uri, transport=None, encoding=None, self.__host, self.__handler = urllib.splithost(uri) if not self.__handler: # Not sure if this is in the JSON spec? 
- #self.__handler = '/' + # self.__handler = '/' self.__handler == '/' if transport is None: if schema == 'unix': @@ -241,13 +254,13 @@ def _run_request(self, request, notify=None): request, verbose=self.__verbose ) - + # Here, the XMLRPC library translates a single list # response to the single value -- should we do the # same, and require a tuple / list to be passed to - # the response object, or expect the Server to be + # the response object, or expect the Server to be # outputting the response appropriately? - + history.add_response(response) if not response: return None @@ -265,22 +278,29 @@ def _notify(self): class _Method(XML_Method): - + def __call__(self, *args, **kwargs): if len(args) > 0 and len(kwargs) > 0: - raise ProtocolError('Cannot use both positional ' + - 'and keyword arguments (according to JSON-RPC spec.)') + raise ProtocolError( + 'Cannot use both positional and keyword arguments ' + '(according to JSON-RPC spec.)') if len(args) > 0: return self.__send(self.__name, args) else: return self.__send(self.__name, kwargs) def __getattr__(self, name): - self.__name = '%s.%s' % (self.__name, name) - return self - # The old method returned a new instance, but this seemed wasteful. - # The only thing that changes is the name. - #return _Method(self.__send, "%s.%s" % (self.__name, name)) + return _Method(self.__send, "%s.%s" % (self.__name, name)) + + def __repr__(self): + return '<{} "{}">'.format(self.__class__.__name__, self.__name) + + def __str__(self): + return self.__repr__() + + def __dir__(self): + return self.__dict__.keys() + class _Notify(object): def __init__(self, request): @@ -288,11 +308,13 @@ def __init__(self, request): def __getattr__(self, name): return _Method(self._request, name) - + + # Batch implementation + class MultiCallMethod(object): - + def __init__(self, method, notify=False): self.method = method self.params = [] @@ -313,14 +335,15 @@ def request(self, encoding=None, rpcid=None): def __repr__(self): return '%s' % self.request() - + def __getattr__(self, method): new_method = '%s.%s' % (self.method, method) self.method = new_method return self + class MultiCallNotify(object): - + def __init__(self, multicall): self.multicall = multicall @@ -329,8 +352,9 @@ def __getattr__(self, name): self.multicall._job_list.append(new_job) return new_job + class MultiCallIterator(object): - + def __init__(self, results): self.results = results @@ -347,8 +371,9 @@ def __getitem__(self, i): def __len__(self): return len(self.results) + class MultiCall(object): - + def __init__(self, server): self._server = server self._job_list = [] @@ -357,8 +382,8 @@ def _request(self): if len(self._job_list) < 1: # Should we alert? This /is/ pretty obvious. return - request_body = '[ %s ]' % ','.join([job.request() for - job in self._job_list]) + request_body = '[ {0} ]'.format( + ','.join([job.request() for job in self._job_list])) responses = self._server._run_request(request_body) del self._job_list[:] if not responses: @@ -376,19 +401,21 @@ def __getattr__(self, name): __call__ = _request -# These lines conform to xmlrpclib's "compatibility" line. +# These lines conform to xmlrpclib's "compatibility" line. # Not really sure if we should include these, but oh well. 
Server = ServerProxy + class Fault(object): # JSON-RPC error class + def __init__(self, code=-32000, message='Server error', rpcid=None): self.faultCode = code self.faultString = message self.rpcid = rpcid def error(self): - return {'code':self.faultCode, 'message':self.faultString} + return {'code': self.faultCode, 'message': self.faultString} def response(self, rpcid=None, version=None): if not version: @@ -402,25 +429,27 @@ def response(self, rpcid=None, version=None): def __repr__(self): return '' % (self.faultCode, self.faultString) + def random_id(length=8): return_id = '' for i in range(length): return_id += random.choice(IDCHARS) return return_id + class Payload(dict): def __init__(self, rpcid=None, version=None): if not version: version = config.version self.id = rpcid self.version = float(version) - + def request(self, method, params=[]): if type(method) not in types.StringTypes: raise ValueError('Method name must be a string.') if not self.id: self.id = random_id() - request = { 'id':self.id, 'method':method } + request = {'id': self.id, 'method': method} if params: request['params'] = params if self.version >= 2: @@ -436,7 +465,7 @@ def notify(self, method, params=[]): return request def response(self, result=None): - response = {'result':result, 'id':self.id} + response = {'result': result, 'id': self.id} if self.version >= 2: response['jsonrpc'] = str(self.version) else: @@ -449,13 +478,15 @@ def error(self, code=-32000, message='Server error.'): del error['result'] else: error['result'] = None - error['error'] = {'code':code, 'message':message} + error['error'] = {'code': code, 'message': message} return error -def dumps(params=[], methodname=None, methodresponse=None, + +def dumps( + params=[], methodname=None, methodresponse=None, encoding=None, rpcid=None, version=None, notify=None): """ - This differs from the Python implementation in that it implements + This differs from the Python implementation in that it implements the rpcid argument since the 2.0 spec requires it for responses. """ if not version: @@ -464,7 +495,7 @@ def dumps(params=[], methodname=None, methodresponse=None, if methodname in types.StringTypes and \ type(params) not in valid_params and \ not isinstance(params, Fault): - """ + """ If a method, and params are not in a listish or a Fault, error out. 
""" @@ -477,10 +508,14 @@ def dumps(params=[], methodname=None, methodresponse=None, if type(params) is Fault: response = payload.error(params.faultCode, params.faultString) return jdumps(response, encoding=encoding) - if type(methodname) not in types.StringTypes and methodresponse != True: - raise ValueError('Method name must be a string, or methodresponse '+ - 'must be set to True.') - if config.use_jsonclass == True: + + if type(methodname) not in types.StringTypes and \ + methodresponse is not True: + raise ValueError( + 'Method name must be a string, or methodresponse must ' + 'be set to True.') + + if config.use_jsonclass is True: from jsonrpclib import jsonclass params = jsonclass.dump(params) if methodresponse is True: @@ -489,12 +524,13 @@ def dumps(params=[], methodname=None, methodresponse=None, response = payload.response(params) return jdumps(response, encoding=encoding) request = None - if notify == True: + if notify is True: request = payload.notify(methodname, params) else: request = payload.request(methodname, params) return jdumps(request, encoding=encoding) + def loads(data): """ This differs from the Python implementation, in that it returns @@ -505,36 +541,39 @@ def loads(data): # notification return None result = jloads(data) - # if the above raises an error, the implementing server code + # if the above raises an error, the implementing server code # should return something like the following: # { 'jsonrpc':'2.0', 'error': fault.error(), id: None } - if config.use_jsonclass == True: + if config.use_jsonclass is True: from jsonrpclib import jsonclass result = jsonclass.load(result) return result + def check_for_errors(result): if not result: # Notification return result - if type(result) is not types.DictType: + + if not isinstance(result, dict): raise TypeError('Response is not a dict.') if 'jsonrpc' in result.keys() and float(result['jsonrpc']) > 2.0: raise NotImplementedError('JSON-RPC version not yet supported.') if 'result' not in result.keys() and 'error' not in result.keys(): raise ValueError('Response does not have a result or error key.') - if 'error' in result.keys() and result['error'] != None: + if 'error' in result.keys() and result['error'] is not None: code = result['error']['code'] message = result['error']['message'] raise ProtocolError((code, message)) return result + def isbatch(result): if type(result) not in (types.ListType, types.TupleType): return False if len(result) < 1: return False - if type(result[0]) is not types.DictType: + if not isinstance(result[0], dict): return False if 'jsonrpc' not in result[0].keys(): return False @@ -546,11 +585,12 @@ def isbatch(result): return False return True + def isnotification(request): if 'id' not in request.keys(): # 2.0 notification return True - if request['id'] == None: + if request['id'] is None: # 1.0 notification return True return False diff --git a/lib/lockfile/__init__.py b/lib/lockfile/__init__.py index 04b23503cf..a6f44a55c6 100644 --- a/lib/lockfile/__init__.py +++ b/lib/lockfile/__init__.py @@ -53,14 +53,11 @@ from __future__ import absolute_import -import sys -import socket +import functools import os +import socket import threading -import time -import urllib import warnings -import functools # Work with PEP8 and non-PEP8 versions of threading module. if not hasattr(threading, "current_thread"): @@ -73,6 +70,7 @@ 'LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock', 'LockBase', 'locked'] + class Error(Exception): """ Base class for other exceptions. 
@@ -84,6 +82,7 @@ class Error(Exception): """ pass + class LockError(Error): """ Base class for error arising from attempts to acquire the lock. @@ -95,6 +94,7 @@ class LockError(Error): """ pass + class LockTimeout(LockError): """Raised when lock creation fails within a user-defined period of time. @@ -105,6 +105,7 @@ class LockTimeout(LockError): """ pass + class AlreadyLocked(LockError): """Some other thread/process is locking the file. @@ -115,6 +116,7 @@ class AlreadyLocked(LockError): """ pass + class LockFailed(LockError): """Lock file creation failed for some other reason. @@ -125,6 +127,7 @@ class LockFailed(LockError): """ pass + class UnlockError(Error): """ Base class for errors arising from attempts to release the lock. @@ -136,6 +139,7 @@ class UnlockError(Error): """ pass + class NotLocked(UnlockError): """Raised when an attempt is made to unlock an unlocked file. @@ -146,6 +150,7 @@ class NotLocked(UnlockError): """ pass + class NotMyLock(UnlockError): """Raised when an attempt is made to unlock a file someone else locked. @@ -156,6 +161,7 @@ class NotMyLock(UnlockError): """ pass + class _SharedBase(object): def __init__(self, path): self.path = path @@ -200,6 +206,7 @@ def __exit__(self, *_exc): def __repr__(self): return "<%s: %r>" % (self.__class__.__name__, self.path) + class LockBase(_SharedBase): """Base class for platform-specific lock classes.""" def __init__(self, path, threaded=True, timeout=None): @@ -257,6 +264,7 @@ def __repr__(self): return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name, self.path) + def _fl_helper(cls, mod, *args, **kwds): warnings.warn("Import from %s module instead of lockfile package" % mod, DeprecationWarning, stacklevel=2) @@ -270,6 +278,7 @@ def _fl_helper(cls, mod, *args, **kwds): kwds["threaded"] = True return cls(*args, **kwds) + def LinkFileLock(*args, **kwds): """Factory function provided for backwards compatibility. @@ -280,6 +289,7 @@ def LinkFileLock(*args, **kwds): return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile", *args, **kwds) + def MkdirFileLock(*args, **kwds): """Factory function provided for backwards compatibility. @@ -290,6 +300,7 @@ def MkdirFileLock(*args, **kwds): return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile", *args, **kwds) + def SQLiteFileLock(*args, **kwds): """Factory function provided for backwards compatibility. @@ -300,6 +311,7 @@ def SQLiteFileLock(*args, **kwds): return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile", *args, **kwds) + def locked(path, timeout=None): """Decorator which enables locks for decorated function. @@ -324,6 +336,7 @@ def wrapper(*args, **kwargs): return wrapper return decor + if hasattr(os, "link"): from . import linklockfile as _llf LockFile = _llf.LinkLockFile @@ -332,4 +345,3 @@ def wrapper(*args, **kwargs): LockFile = _mlf.MkdirLockFile FileLock = LockFile - diff --git a/lib/lockfile/linklockfile.py b/lib/lockfile/linklockfile.py index ab5e1f21e1..2ca9be0423 100644 --- a/lib/lockfile/linklockfile.py +++ b/lib/lockfile/linklockfile.py @@ -6,6 +6,7 @@ from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, AlreadyLocked) + class LinkLockFile(LockBase): """Lock access to a file using atomic property of link(2). @@ -46,7 +47,7 @@ def acquire(self, timeout=None): else: raise AlreadyLocked("%s is already locked" % self.path) - time.sleep(timeout is not None and timeout/10 or 0.1) + time.sleep(timeout is not None and timeout / 10 or 0.1) else: # Link creation succeeded. We're good to go. 
return @@ -70,4 +71,3 @@ def i_am_locking(self): def break_lock(self): if os.path.exists(self.lock_file): os.unlink(self.lock_file) - diff --git a/lib/lockfile/mkdirlockfile.py b/lib/lockfile/mkdirlockfile.py index caf462e840..05a8c96ca5 100644 --- a/lib/lockfile/mkdirlockfile.py +++ b/lib/lockfile/mkdirlockfile.py @@ -8,6 +8,7 @@ from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, AlreadyLocked) + class MkdirLockFile(LockBase): """Lock file by creating a directory.""" def __init__(self, path, threaded=True, timeout=None): @@ -18,10 +19,10 @@ def __init__(self, path, threaded=True, timeout=None): LockBase.__init__(self, path, threaded, timeout) # Lock file itself is a directory. Place the unique file name into # it. - self.unique_name = os.path.join(self.lock_file, - "%s.%s%s" % (self.hostname, - self.tname, - self.pid)) + self.unique_name = os.path.join(self.lock_file, + "%s.%s%s" % (self.hostname, + self.tname, + self.pid)) def acquire(self, timeout=None): timeout = timeout if timeout is not None else self.timeout diff --git a/lib/lockfile/pidlockfile.py b/lib/lockfile/pidlockfile.py index c4c8a39adb..069e85b15b 100644 --- a/lib/lockfile/pidlockfile.py +++ b/lib/lockfile/pidlockfile.py @@ -14,9 +14,8 @@ from __future__ import absolute_import -import os -import sys import errno +import os import time from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock, @@ -38,8 +37,6 @@ def __init__(self, path, threaded=False, timeout=None): # pid lockfiles don't support threaded operation, so always force # False as the threaded arg. LockBase.__init__(self, path, False, timeout) - dirname = os.path.dirname(self.lock_file) - basename = os.path.split(self.path)[-1] self.unique_name = self.path def read_pid(self): @@ -89,7 +86,7 @@ def acquire(self, timeout=None): else: raise AlreadyLocked("%s is already locked" % self.path) - time.sleep(timeout is not None and timeout/10 or 0.1) + time.sleep(timeout is not None and timeout / 10 or 0.1) else: raise LockFailed("failed to create %s" % self.path) else: @@ -117,6 +114,7 @@ def break_lock(self): """ remove_existing_pidfile(self.path) + def read_pid_from_pidfile(pidfile_path): """ Read the PID recorded in the named PID file. @@ -132,10 +130,10 @@ def read_pid_from_pidfile(pidfile_path): pass else: # According to the FHS 2.3 section on PID files in /var/run: - # + # # The file must consist of the process identifier in # ASCII-encoded decimal, followed by a newline character. - # + # # Programs that read PID files should be somewhat flexible # in what they accept; i.e., they should ignore extra # whitespace, leading zeroes, absence of the trailing @@ -171,8 +169,7 @@ def write_pid_to_pidfile(pidfile_path): # would contain three characters: two, five, and newline. pid = os.getpid() - line = "%(pid)d\n" % vars() - pidfile.write(line) + pidfile.write("%s\n" % pid) pidfile.close() diff --git a/lib/lockfile/sqlitelockfile.py b/lib/lockfile/sqlitelockfile.py index 734ce0308b..f997e2444e 100644 --- a/lib/lockfile/sqlitelockfile.py +++ b/lib/lockfile/sqlitelockfile.py @@ -10,6 +10,7 @@ from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked + class SQLiteLockFile(LockBase): "Demonstrate SQL-based locking." @@ -34,7 +35,7 @@ def __init__(self, path, threaded=True, timeout=None): import sqlite3 self.connection = sqlite3.connect(SQLiteLockFile.testdb) - + c = self.connection.cursor() try: c.execute("create table locks" @@ -97,7 +98,7 @@ def acquire(self, timeout=None): if len(rows) == 1: # We're the locker, so go home. 
return - + # Maybe we should wait a bit longer. if timeout is not None and time.time() > end_time: if timeout > 0: @@ -130,7 +131,7 @@ def _who_is_locking(self): " where lock_file = ?", (self.lock_file,)) return cursor.fetchone()[0] - + def is_locked(self): cursor = self.connection.cursor() cursor.execute("select * from locks" diff --git a/lib/lockfile/symlinklockfile.py b/lib/lockfile/symlinklockfile.py index b5dd71f857..23b41f582b 100644 --- a/lib/lockfile/symlinklockfile.py +++ b/lib/lockfile/symlinklockfile.py @@ -1,11 +1,12 @@ from __future__ import absolute_import -import time import os +import time -from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, +from . import (LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked) + class SymlinkLockFile(LockBase): """Lock access to a file using symlink(2).""" @@ -17,10 +18,10 @@ def __init__(self, path, threaded=True, timeout=None): def acquire(self, timeout=None): # Hopefully unnecessary for symlink. - #try: - # open(self.unique_name, "wb").close() - #except IOError: - # raise LockFailed("failed to create %s" % self.unique_name) + # try: + # open(self.unique_name, "wb").close() + # except IOError: + # raise LockFailed("failed to create %s" % self.unique_name) timeout = timeout if timeout is not None else self.timeout end_time = time.time() if timeout is not None and timeout > 0: @@ -45,7 +46,7 @@ def acquire(self, timeout=None): else: raise AlreadyLocked("%s is already locked" % self.path) - time.sleep(timeout/10 if timeout is not None else 0.1) + time.sleep(timeout / 10 if timeout is not None else 0.1) else: # Link creation succeeded. We're good to go. return @@ -61,8 +62,8 @@ def is_locked(self): return os.path.islink(self.lock_file) def i_am_locking(self): - return os.path.islink(self.lock_file) and \ - os.readlink(self.lock_file) == self.unique_name + return (os.path.islink(self.lock_file) + and os.readlink(self.lock_file) == self.unique_name) def break_lock(self): if os.path.islink(self.lock_file): # exists && link diff --git a/lib/mako/__init__.py b/lib/mako/__init__.py index 74526ecce7..1360163461 100644 --- a/lib/mako/__init__.py +++ b/lib/mako/__init__.py @@ -1,8 +1,8 @@ # mako/__init__.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -__version__ = '1.0.3' +__version__ = '1.0.4' diff --git a/lib/mako/_ast_util.py b/lib/mako/_ast_util.py index cc298d5866..8d19b0d751 100644 --- a/lib/mako/_ast_util.py +++ b/lib/mako/_ast_util.py @@ -1,5 +1,5 @@ # mako/_ast_util.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/ast.py b/lib/mako/ast.py index c55b29c939..8d2d150cd6 100644 --- a/lib/mako/ast.py +++ b/lib/mako/ast.py @@ -1,5 +1,5 @@ # mako/ast.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/cache.py b/lib/mako/cache.py index c7aabd2b44..94f387018f 100644 --- a/lib/mako/cache.py +++ b/lib/mako/cache.py @@ -1,5 +1,5 @@ # mako/cache.py -# Copyright (C) 2006-2015 the Mako authors and 
contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/cmd.py b/lib/mako/cmd.py index 50d47fcb4f..dd1f833e77 100755 --- a/lib/mako/cmd.py +++ b/lib/mako/cmd.py @@ -1,5 +1,5 @@ # mako/cmd.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/codegen.py b/lib/mako/codegen.py index bf86d795bf..d4ecbe8bb3 100644 --- a/lib/mako/codegen.py +++ b/lib/mako/codegen.py @@ -1,5 +1,5 @@ # mako/codegen.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/compat.py b/lib/mako/compat.py index db22b991f6..a2ab243427 100644 --- a/lib/mako/compat.py +++ b/lib/mako/compat.py @@ -5,6 +5,7 @@ py33 = sys.version_info >= (3, 3) py2k = sys.version_info < (3,) py26 = sys.version_info >= (2, 6) +py27 = sys.version_info >= (2, 7) jython = sys.platform.startswith('java') win32 = sys.platform.startswith('win') pypy = hasattr(sys, 'pypy_version_info') diff --git a/lib/mako/exceptions.py b/lib/mako/exceptions.py index 84d2297b91..cb6fb3f708 100644 --- a/lib/mako/exceptions.py +++ b/lib/mako/exceptions.py @@ -1,5 +1,5 @@ # mako/exceptions.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/ext/autohandler.py b/lib/mako/ext/autohandler.py index 9ee780a6ae..9d1c9114aa 100644 --- a/lib/mako/ext/autohandler.py +++ b/lib/mako/ext/autohandler.py @@ -1,5 +1,5 @@ # ext/autohandler.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/ext/babelplugin.py b/lib/mako/ext/babelplugin.py index 53d62baabd..0b5e84faca 100644 --- a/lib/mako/ext/babelplugin.py +++ b/lib/mako/ext/babelplugin.py @@ -1,5 +1,5 @@ # ext/babelplugin.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/ext/preprocessors.py b/lib/mako/ext/preprocessors.py index 5624f701cc..9b700d1da9 100644 --- a/lib/mako/ext/preprocessors.py +++ b/lib/mako/ext/preprocessors.py @@ -1,5 +1,5 @@ # ext/preprocessors.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/ext/pygmentplugin.py b/lib/mako/ext/pygmentplugin.py index 1121c5d3d2..4057caa670 100644 --- a/lib/mako/ext/pygmentplugin.py +++ b/lib/mako/ext/pygmentplugin.py @@ -1,5 +1,5 @@ # ext/pygmentplugin.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright 
(C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/ext/turbogears.py b/lib/mako/ext/turbogears.py index 2e7d03909a..eaa2d78000 100644 --- a/lib/mako/ext/turbogears.py +++ b/lib/mako/ext/turbogears.py @@ -1,5 +1,5 @@ # ext/turbogears.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/filters.py b/lib/mako/filters.py index 525aeb8e55..c082690c4d 100644 --- a/lib/mako/filters.py +++ b/lib/mako/filters.py @@ -1,5 +1,5 @@ # mako/filters.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/lexer.py b/lib/mako/lexer.py index 2fa08e4ac3..cf4187f711 100644 --- a/lib/mako/lexer.py +++ b/lib/mako/lexer.py @@ -1,5 +1,5 @@ # mako/lexer.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -95,31 +95,37 @@ def match_reg(self, reg): # (match and "TRUE" or "FALSE") return match - def parse_until_text(self, *text): + def parse_until_text(self, watch_nesting, *text): startpos = self.match_position text_re = r'|'.join(text) brace_level = 0 + paren_level = 0 + bracket_level = 0 while True: match = self.match(r'#.*\n') if match: continue - match = self.match(r'(\"\"\"|\'\'\'|\"|\')((? 
0: - brace_level -= 1 - continue + if match and not (watch_nesting + and (brace_level > 0 or paren_level > 0 + or bracket_level > 0)): return \ self.text[startpos: self.match_position - len(match.group(1))],\ match.group(1) - match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S) + elif not match: + match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S) if match: brace_level += match.group(1).count('{') brace_level -= match.group(1).count('}') + paren_level += match.group(1).count('(') + paren_level -= match.group(1).count(')') + bracket_level += match.group(1).count('[') + bracket_level -= match.group(1).count(']') continue raise exceptions.SyntaxException( "Expected: %s" % @@ -368,7 +374,7 @@ def match_python_block(self): match = self.match(r"<%(!)?") if match: line, pos = self.matched_lineno, self.matched_charpos - text, end = self.parse_until_text(r'%>') + text, end = self.parse_until_text(False, r'%>') # the trailing newline helps # compiler.parse() not complain about indentation text = adjust_whitespace(text) + "\n" @@ -384,9 +390,9 @@ def match_expression(self): match = self.match(r"\${") if match: line, pos = self.matched_lineno, self.matched_charpos - text, end = self.parse_until_text(r'\|', r'}') + text, end = self.parse_until_text(True, r'\|', r'}') if end == '|': - escapes, end = self.parse_until_text(r'}') + escapes, end = self.parse_until_text(True, r'}') else: escapes = "" text = text.replace('\r\n', '\n') diff --git a/lib/mako/lookup.py b/lib/mako/lookup.py index e6dff9d2fc..a9c5bb2a03 100644 --- a/lib/mako/lookup.py +++ b/lib/mako/lookup.py @@ -1,5 +1,5 @@ # mako/lookup.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/parsetree.py b/lib/mako/parsetree.py index e7af4bc0f3..879882e675 100644 --- a/lib/mako/parsetree.py +++ b/lib/mako/parsetree.py @@ -1,5 +1,5 @@ # mako/parsetree.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/pygen.py b/lib/mako/pygen.py index 5d87bbd286..8514e0226b 100644 --- a/lib/mako/pygen.py +++ b/lib/mako/pygen.py @@ -1,5 +1,5 @@ # mako/pygen.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/pyparser.py b/lib/mako/pyparser.py index 96e533507b..15d0da6efc 100644 --- a/lib/mako/pyparser.py +++ b/lib/mako/pyparser.py @@ -1,5 +1,5 @@ # mako/pyparser.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/runtime.py b/lib/mako/runtime.py index 8d2f4a9e56..5c403812c5 100644 --- a/lib/mako/runtime.py +++ b/lib/mako/runtime.py @@ -1,5 +1,5 @@ # mako/runtime.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: 
http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/template.py b/lib/mako/template.py index afb679a528..bacbc132b5 100644 --- a/lib/mako/template.py +++ b/lib/mako/template.py @@ -1,5 +1,5 @@ # mako/template.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -475,6 +475,14 @@ def get_def(self, name): return DefTemplate(self, getattr(self.module, "render_%s" % name)) + def list_defs(self): + """return a list of defs in the template. + + .. versionadded:: 1.0.4 + + """ + return [i[7:] for i in dir(self.module) if i[:7] == 'render_'] + def _get_def_callable(self, name): return getattr(self.module, "render_%s" % name) diff --git a/lib/mako/util.py b/lib/mako/util.py index c7dad655a3..2f089ff404 100644 --- a/lib/mako/util.py +++ b/lib/mako/util.py @@ -1,5 +1,5 @@ # mako/util.py -# Copyright (C) 2006-2015 the Mako authors and contributors +# Copyright (C) 2006-2016 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/markupsafe/_speedups.pyd b/lib/markupsafe/_speedups.pyd new file mode 100644 index 0000000000000000000000000000000000000000..266e39910664a0bf86c20be513f7b6ac61723e23 GIT binary patch literal 8704 zcmeHMe{@sVeZP_{V;hVdf`UnKc*Zu+0Y1idhCPMM!Zuu`&(y&N93T}!dd3Q}{<`1$z4w0KS5m{Kz0AfK%RtvO#ts5ftI?nT_2UBAGv^viK2_M<7K}+9(a83wuU+!{!r_Q2ZBwLZJS+vnQr+j8r1nTaDb3BzDK<_2 z;ESUNM(1@8r~Ac#Wq1VRi-CKFFYEN(!>9Cd=J2m{IA{2h4rl9dUC`e~x}W4}V?ASm zzp%6K&5|~zWyaV7X}UEV&Vw@Bpz^tZc{;w!LV$29V|IF=r6Jpj0R;=q;227#E~s~o z4a)vSjI9N?vWT%pQckxT#_IG5O#+yU)4bmWN{sQzcJ9zERh6A8Xv=f)AQx%v<~o4s zO=4_oX*A$deT;oH4I~g)7WypoNv#IQqtp;%Wh7UIPPXnvpVVp?>n-gt1obt7pa<># zNkTwDt|^Twp@^Tc2Z==iqTovClUfa9t0oOUMgI>XP}9V_E=%2MvydAj=2rkK#Jf$pgCKHd=HW9S3SN1l~4L z&A9?oa52QYd*hkB>!`H3pC_FQ$)Y%>ISYx~Wz%@V{wh@BIb76cadARxEISCFz$1;5 zMypxB!QJd$@7_RRQhS$Og)uE^q=EMAetm9@#@hk5dWul}2+Ip)_f<~U$&#k6*h z>uhXcYuET6$L@6;=;h)oua1?|{IAHiKHh!Y@zq}0mf~GUtfQwIy066V_Gp=^ZT1ZCcVJI&-&BZ^r?sdap zHVpEvOS-|UFvyclR>ZuJjA5;RIakv(E@-V-zD(a_4?pFec)`=2XE3K=@jz-#0lEXGR+wWi|gC2 zmQ|e5kMAE4scRZmJml#98Yzj5nEbT1u8f?;qu)5ensXXNWwANS?Ku2*FFezXv>NvE@bK<+aD^%chl>WZZg zkX1BiayqT;XjQ*fvH+-7QjDjOQx7rhtL7{s32_YJ)SM2&6ZRL#X2Sjr0^R51_GGPP zw1{^VvD9lua=QhP`g;R^7xSdP2^{HI--VMWxj4!bEm=I#md6u2^X0@=i87FTBjun9 zT%ZfG7;&9*bXUOxVYg!**MQ@hB8;?Sb>eEm{yu`HaFJ0`#>l&ttkFE~>R0bQl+sr< zCx@mR;>!!ugJpg1PPU7mvBXtGkkr}BY2nZr& zHv{#33|#~23?ohz*uc?;aFDK(E6Loq0S4?{kfE@qW`gY+i0_8Lj}3tr0aDkn8w2*o zAcd5~TvzG(4DhGz8$sdKE876nLRj+xNj(K&On6s=IC_}GJ>rz+tOrN4+we4AUC4qp zT!e}_UA;zG1n^40sd9VitwOh9;%s&3XUpA;GgamhW8$VPe)VH=HqnwwIR6%! ztZL_XQOw12D$avE@*3EPn`-a6X^mHKVV_Bh!A;822E|YUx2F5NdN)$KIEURyRU((9 z$&TGgC{M7QSd78y(k$@@@K#$6Ud0(w`z=;CF&IP2kww9Gun!F|K0VEFYj1RGa744k zS4>##hwzP|`;Hf#5Wh&s#S*W{HW$rC{+#rb?sTSwyx6mWTk?zGcL?dob?a)0Q5IDj=BMZpEYH<|>-a8_Y#whN| z+#r(6$aRPs`GbpJ;E+EFqUOv*mCX@9=!|Q47gLgcDicTQ&kw`GeectfKcz)$~y? 
zKI8CoxZYa*NWSB_-e32hl>P1pkw<xGy#T#$eY4%cxb)?}(aKGLCGZmE6>yvyou6l6&;$K~Ts(9jZLuwekm` zvUsNE^n#HtP0hIH8*vx*TlbJspty|jpO*K$v+Ns7Fs8)BZww+C4ud`V1W(FTOb2{1ej_jx# zk`o1C%;vfA2CxvhrC%m}%dzS+Fr<|n)t@ix&lm8lS&tP|=YT3btqU&VoIxUm-{(YQ_L5Za#$oy~bVYaog^K;uCSNy7XjiS0$HQJ;`gW>Ir)mj=mJ?$NlsOmL9 zLnIInDeO8l)`x?Bd@;SgwyE4(rx5CHs`Yr-EZbDx>S(0hlw}{lo;g#kFBDoG5Bt?% zBn*-}x;@5N$!$;4Dr_%n?5vMQy|p357iHs9Ef~gFW`?)1)4MhjR^WNuk1uT<&{`FV zguKn29SWNZt<|B3Pc?>YBdsf(9zBJT90_kXnCs!c*Aot^##$@Ae^|*nI@PvFxO_!v zAQVE%$Y@vEV+v*&Bh25?i9)(*L+!fiGF@PW#jAuVAl{BhFsv$3w$|!x$9f}vuRp4? zpIN*?*axGXoxY8G74~9AYgADft4do4c*Ai_xy<4X=pml4c#(O1?Xm4%WoHn6Jj%S@ zT5qgF@dsOjes7yE90*}rwu$H+9WDM2b#*XAp8Odoenc|tRS_8Y(HilFeeFsNDEMI? z*7v;48xOD)2y_5b?UIyYf^eMnnH@w!7j48V9d8Iq%^lh6<^dEo&Y|~ zO`Z2h{m^>RhIu~+Hbk^U{?B614lw{#!E z_$~Ac=y@o9#sC?kP%43VN367?Gr(xGCMe)8EUKt<8L(Q1zV;3shExJ<;dTSI`=UEw z!`F%(ZP0Pz!n(IjZ4Y_9gex(>4+nHgGiZxrpOKb5BpF6>a%v;tCzPlvscnknZ}UZc zer#7vTDVktaH;grQfbjr3A-*WpdXE5`t_A!k{XeCbA!B$e8`C@e-g*JRAQx)v_7W5 z5`K6Jwo09mxD>{6NjeaWb%cDK68-W40iPH z7?Yxk8jpshCw!r}BKcyb_ohRo<|>W#d1KhIFb=O719N=GvgP0oMKTnmZE@WZ3PVhy z%T6Bq!jjV7p>|H1BNk~@af0>qL{m;4FOKMmP7kdSrktDxALj6-qh=~nWP+*`jD;7e zk}pKZx)aN6rkH#I5%Q&X#tb%MNx$47P~;z?xy|R_K@OVs(u;>ba+y)z;Qhag&LMpq zrMA_i*DN>a`)5z(H=6W)CcJP5f4@mLtbOwH_PH#be601tpNjwVN1juUNoQKm(npg| z+ms04$Bt^$hF>p4%>tONDG5^DxF7V%^&{1hSMYn_Tc8u%gt~7G_~iPJ>c?B?Sqluf zbT+n#%zlWz1^ABub5ZLBNCvPTb=)4{^jmE=`cdG| z0oI|mI}4oPW^@U4*A~F%(65sW;7Jp2ypOTJLT8ZK5BLT;$)5#$8uj5~;8bV!qwj() z)tFQ_5>C*Hx{z?H50{(xlz9Gs8%T`d>u|R9+j!vHU@Vc<^zfdRo~=D?Jv)1L_3Y{S NdJl-7w%^ML{0B>`@Z 2: + from http.client import HTTPS_PORT + from http.client import HTTPConnection + + from urllib.request import AbstractHTTPHandler +else: + from httplib import HTTPS_PORT + from httplib import HTTPConnection + + from urllib2 import AbstractHTTPHandler from OpenSSL import SSL @@ -81,7 +90,8 @@ def connect(self): def close(self): """Close socket and shut down SSL connection""" - self.sock.close() + if hasattr(self.sock, "close"): + self.sock.close() class HTTPSContextHandler(AbstractHTTPHandler): @@ -106,7 +116,7 @@ def __init__(self, ssl_context, debuglevel=0): ssl_context) self.ssl_context = ssl_context else: - self.ssl_context = SSL.Context(SSL.SSLv23_METHOD) + self.ssl_context = SSL.Context(SSL.TLSv1_METHOD) def https_open(self, req): """Opens HTTPS request diff --git a/lib/ndg/httpsclient/ssl_context_util.py b/lib/ndg/httpsclient/ssl_context_util.py index 5e11501764..0ed1d32ede 100644 --- a/lib/ndg/httpsclient/ssl_context_util.py +++ b/lib/ndg/httpsclient/ssl_context_util.py @@ -8,7 +8,12 @@ __license__ = "BSD - see LICENSE file in top-level directory" __contact__ = "Philip.Kershaw@stfc.ac.uk" __revision__ = '$Id$' -import urlparse +import sys + +if sys.version_info[0] > 2: + import urllib.parse as urlparse_ +else: + import urlparse as urlparse_ from OpenSSL import SSL @@ -85,7 +90,7 @@ def set_peer_verification_for_url_hostname(ssl_context, url, '''Convenience routine to set peer verification callback based on ServerSSLCertVerification class''' if not if_verify_enabled or (ssl_context.get_verify_mode() & SSL.VERIFY_PEER): - urlObj = urlparse.urlparse(url) + urlObj = urlparse_.urlparse(url) hostname = urlObj.hostname server_ssl_cert_verif = ServerSSLCertVerification(hostname=hostname) verify_callback_ = server_ssl_cert_verif.get_verify_server_cert_func() diff --git a/lib/ndg/httpsclient/ssl_peer_verification.py b/lib/ndg/httpsclient/ssl_peer_verification.py index 57f65e2ba2..2de390b7a1 100644 --- a/lib/ndg/httpsclient/ssl_peer_verification.py +++ 
b/lib/ndg/httpsclient/ssl_peer_verification.py @@ -14,7 +14,8 @@ from ndg.httpsclient.subj_alt_name import SubjectAltName from pyasn1.codec.der import decoder as der_decoder SUBJ_ALT_NAME_SUPPORT = True -except ImportError, e: + +except ImportError as e: SUBJ_ALT_NAME_SUPPORT = False SUBJ_ALT_NAME_SUPPORT_MSG = ( 'SubjectAltName support is disabled - check pyasn1 package ' @@ -40,7 +41,8 @@ class ServerSSLCertVerification(object): 'userid': 'UID' } SUBJ_ALT_NAME_EXT_NAME = 'subjectAltName' - PARSER_RE_STR = '/(%s)=' % '|'.join(DN_LUT.keys() + DN_LUT.values()) + PARSER_RE_STR = '/(%s)=' % '|'.join(list(DN_LUT.keys()) + \ + list(DN_LUT.values())) PARSER_RE = re.compile(PARSER_RE_STR) __slots__ = ('__hostname', '__certDN', '__subj_alt_name_match') @@ -156,12 +158,12 @@ def __call__(self, connection, peerCert, errorStatus, errorDepth, return preverifyOK def get_verify_server_cert_func(self): - def verify_server_cert(connection, peerCert, errorStatus, errorDepth, - preverifyOK): - return self.__call__(connection, peerCert, errorStatus, - errorDepth, preverifyOK) - - return verify_server_cert + def verify_server_cert(connection, peerCert, errorStatus, errorDepth, + preverifyOK): + return self.__call__(connection, peerCert, errorStatus, + errorDepth, preverifyOK) + + return verify_server_cert @classmethod def _get_subj_alt_name(cls, peer_cert): @@ -195,7 +197,7 @@ def _getCertDN(self): return self.__certDN def _setCertDN(self, val): - if isinstance(val, basestring): + if isinstance(val, str): # Allow for quoted DN certDN = val.strip('"') @@ -203,7 +205,7 @@ def _setCertDN(self, val): if len(dnFields) < 2: raise TypeError('Error parsing DN string: "%s"' % certDN) - self.__certDN = zip(dnFields[1::2], dnFields[2::2]) + self.__certDN = list(zip(dnFields[1::2], dnFields[2::2])) self.__certDN.sort() elif not isinstance(val, list): @@ -226,7 +228,7 @@ def _getHostname(self): return self.__hostname def _setHostname(self, val): - if not isinstance(val, basestring): + if not isinstance(val, str): raise TypeError("Expecting string type for hostname " "attribute") self.__hostname = val diff --git a/lib/ndg/httpsclient/ssl_socket.py b/lib/ndg/httpsclient/ssl_socket.py index ab7f21698e..778031493c 100644 --- a/lib/ndg/httpsclient/ssl_socket.py +++ b/lib/ndg/httpsclient/ssl_socket.py @@ -12,7 +12,7 @@ from datetime import datetime import logging import socket -from cStringIO import StringIO +from io import BytesIO from OpenSSL import SSL @@ -61,22 +61,20 @@ def buf_size(self): @buf_size.setter def buf_size(self, value): """Buffer size for makefile method recv() operations""" - if not isinstance(value, (int, long)): - raise TypeError('Expecting int or long type for "buf_size"; ' + if not isinstance(value, int): + raise TypeError('Expecting int type for "buf_size"; ' 'got %r instead' % type(value)) self.__buf_size = value def close(self): """Shutdown the SSL connection and call the close method of the underlying socket""" -# try: -# self.__ssl_conn.shutdown() -# except SSL.Error: -# # Make errors on shutdown non-fatal -# pass - if self._makefile_refs < 1: - self.__ssl_conn.shutdown() + try: + self.__ssl_conn.shutdown() + except (SSL.Error, SSL.SysCallError): + # Make errors on shutdown non-fatal + pass else: self._makefile_refs -= 1 @@ -236,7 +234,7 @@ def makefile(self, *args): _buf_size = self.buf_size i=0 - stream = StringIO() + stream = BytesIO() startTime = datetime.utcnow() try: dat = self.__ssl_conn.recv(_buf_size) @@ -261,17 +259,6 @@ def makefile(self, *args): stream.seek(0) return stream - -# def 
makefile(self, mode='r', bufsize=-1): -# -# """Make and return a file-like object that -# works with the SSL connection. Just use the code -# from the socket module.""" -# -# self._makefile_refs += 1 -# # close=True so as to decrement the reference count when done with -# # the file-like object. -# return socket._fileobject(self.socket, mode, bufsize, close=True) def getsockname(self): """ diff --git a/lib/ndg/httpsclient/subj_alt_name.py b/lib/ndg/httpsclient/subj_alt_name.py index 6bd8e0d8ac..b2c1918b86 100644 --- a/lib/ndg/httpsclient/subj_alt_name.py +++ b/lib/ndg/httpsclient/subj_alt_name.py @@ -14,7 +14,7 @@ try: from pyasn1.type import univ, constraint, char, namedtype, tag -except ImportError, e: +except ImportError as e: import_error_msg = ('Error importing pyasn1, subjectAltName check for SSL ' 'peer verification will be disabled. Import error ' 'is: %s' % e) diff --git a/lib/ndg/httpsclient/test/pki/ca/08bd99c7.0 b/lib/ndg/httpsclient/test/pki/ca/08bd99c7.0 new file mode 100644 index 0000000000..63ef7c302f --- /dev/null +++ b/lib/ndg/httpsclient/test/pki/ca/08bd99c7.0 @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDLjCCAhagAwIBAgIBATANBgkqhkiG9w0BAQUFADA3MREwDwYDVQQLDAhTZWN1 +cml0eTEUMBIGA1UEAwwLTkRHIFRlc3QgQ0ExDDAKBgNVBAoMA05ERzAeFw0xNTAx +MjExNDMzMThaFw0yMDAxMjAxNDMzMThaMDcxETAPBgNVBAsMCFNlY3VyaXR5MRQw +EgYDVQQDDAtOREcgVGVzdCBDQTEMMAoGA1UECgwDTkRHMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEArq4QKUTRq45nCDR/p+OlHIIN8+ugUbiCfteazbTG +rX8vIQ9HxSuz/xvxTw+E0KgA4YSK2SJJP4QiCjlMKYS3Rt8o361GNtnRmeo5qyBu +GMSv73XL1uuqumggUZyrhhksckR7gyNFnKVXzZjAQPepsT0xBjs5uEAEqXJzAf+r +24AnT3MZRh7gsyEe3sZjd75kZVwcrWhrocyKlMCR77yEr+uP4pg+dEMhDMKKxlaF +C5RPMotOpWm/7AToHrGia34WSmcxvuOwxOkI4xEW6mxWMaVTBCXUh6Wb/0m/x8Nv +9VvS2UBC4sCp4MqlDpySxQpT1RgrhMTEmtUOh50l4eEhdwIDAQABo0UwQzASBgNV +HRMBAf8ECDAGAQH/AgEAMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUkEvQjGOP +Oj5DZEvsm96AdiiFXWgwDQYJKoZIhvcNAQEFBQADggEBAGD0kQASmNzvtYL+JUGf +gTPyJhADl9Ai9GvZJsY/wX0IRTxRl5y08Dqlg3qyGG3GzL918cr1sVCYnLepNQES +T0MIz50DCKGryNSc74JHPDxpYaSV6whmNH5iwh8fy6tmJwF3FWbGXD2ddc+ofJqP +WPPJtzqxuuJ6iXQIFqD9mEn3iXVcvFuSzpdpH9paORTKB0j4gya9zctB8LP0ZXIE +//wREc+4msnmoTn+qkFAOPBg9WnvoipfyCXPgbTagxlofVjZ7gAgYIefqhXBTQdd +5tnYdyQQBRcUXQS2bBX03q8ftcxOjc3SvXI4MvrqofuFPwu4GnrspnC0KQYlXwEI +7ds= +-----END CERTIFICATE----- diff --git a/lib/ndg/httpsclient/test/pki/ca/ade0138a.0 b/lib/ndg/httpsclient/test/pki/ca/ade0138a.0 new file mode 100644 index 0000000000..63ef7c302f --- /dev/null +++ b/lib/ndg/httpsclient/test/pki/ca/ade0138a.0 @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDLjCCAhagAwIBAgIBATANBgkqhkiG9w0BAQUFADA3MREwDwYDVQQLDAhTZWN1 +cml0eTEUMBIGA1UEAwwLTkRHIFRlc3QgQ0ExDDAKBgNVBAoMA05ERzAeFw0xNTAx +MjExNDMzMThaFw0yMDAxMjAxNDMzMThaMDcxETAPBgNVBAsMCFNlY3VyaXR5MRQw +EgYDVQQDDAtOREcgVGVzdCBDQTEMMAoGA1UECgwDTkRHMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEArq4QKUTRq45nCDR/p+OlHIIN8+ugUbiCfteazbTG +rX8vIQ9HxSuz/xvxTw+E0KgA4YSK2SJJP4QiCjlMKYS3Rt8o361GNtnRmeo5qyBu +GMSv73XL1uuqumggUZyrhhksckR7gyNFnKVXzZjAQPepsT0xBjs5uEAEqXJzAf+r +24AnT3MZRh7gsyEe3sZjd75kZVwcrWhrocyKlMCR77yEr+uP4pg+dEMhDMKKxlaF +C5RPMotOpWm/7AToHrGia34WSmcxvuOwxOkI4xEW6mxWMaVTBCXUh6Wb/0m/x8Nv +9VvS2UBC4sCp4MqlDpySxQpT1RgrhMTEmtUOh50l4eEhdwIDAQABo0UwQzASBgNV +HRMBAf8ECDAGAQH/AgEAMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUkEvQjGOP +Oj5DZEvsm96AdiiFXWgwDQYJKoZIhvcNAQEFBQADggEBAGD0kQASmNzvtYL+JUGf +gTPyJhADl9Ai9GvZJsY/wX0IRTxRl5y08Dqlg3qyGG3GzL918cr1sVCYnLepNQES +T0MIz50DCKGryNSc74JHPDxpYaSV6whmNH5iwh8fy6tmJwF3FWbGXD2ddc+ofJqP +WPPJtzqxuuJ6iXQIFqD9mEn3iXVcvFuSzpdpH9paORTKB0j4gya9zctB8LP0ZXIE 
+//wREc+4msnmoTn+qkFAOPBg9WnvoipfyCXPgbTagxlofVjZ7gAgYIefqhXBTQdd +5tnYdyQQBRcUXQS2bBX03q8ftcxOjc3SvXI4MvrqofuFPwu4GnrspnC0KQYlXwEI +7ds= +-----END CERTIFICATE----- diff --git a/lib/ndg/httpsclient/test/pki/localhost.crt b/lib/ndg/httpsclient/test/pki/localhost.crt index 257a5d5d09..953c7efe7e 100644 --- a/lib/ndg/httpsclient/test/pki/localhost.crt +++ b/lib/ndg/httpsclient/test/pki/localhost.crt @@ -1,14 +1,82 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 1 (0x1) + Signature Algorithm: sha1WithRSAEncryption + Issuer: OU=Security, CN=NDG Test CA, O=NDG + Validity + Not Before: Jan 21 14:45:01 2015 GMT + Not After : Jan 20 14:45:01 2018 GMT + Subject: O=NDG, OU=Security, CN=localhost + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + RSA Public Key: (2048 bit) + Modulus (2048 bit): + 00:e3:29:45:fc:56:2d:a2:21:b6:49:c6:6a:ef:b3: + ed:d3:32:47:77:d9:85:ef:e5:6a:db:70:8b:5d:41: + 4d:b5:76:f5:96:42:5a:f7:82:a5:bb:b3:e1:f9:ac: + c6:b7:71:61:f4:4d:dd:28:f6:b1:ef:65:dc:5a:8c: + 47:b1:17:38:e1:8a:5f:40:b0:bb:a0:87:61:a7:72: + f8:c1:a1:5f:3b:f7:94:b5:cb:c3:50:84:ef:a8:13: + d4:92:ff:af:3a:d1:31:42:90:4b:58:4c:84:47:a6: + 3a:a3:3d:c1:9a:43:3c:10:f6:8a:b5:97:11:b7:74: + ab:32:92:be:9a:fc:ef:5e:45:78:30:61:67:10:63: + 09:ef:61:b7:1c:47:cc:69:c9:e7:27:8f:4d:97:33: + 59:33:b8:47:89:86:4c:cd:a4:38:7c:d0:60:ee:52: + c8:e1:2a:f1:3b:9b:e9:7c:d5:af:88:33:91:9c:10: + 63:89:01:03:fb:26:5e:3f:61:c3:b4:f0:fb:1f:ad: + e8:d2:49:8e:2f:16:81:bb:9c:d6:a5:48:91:58:7d: + ac:ac:2c:02:8a:f2:f4:22:80:1f:8c:32:5b:b5:77: + d0:36:e9:27:9a:9f:31:67:d5:4e:32:8d:cf:ce:73: + ef:88:86:e9:3c:53:e6:09:55:02:2b:86:7d:91:8d: + fb:53 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + Netscape Comment: + OpenSSL Generated Certificate + X509v3 Subject Key Identifier: + A3:77:23:B5:1C:98:85:C8:6D:31:40:1C:2F:20:57:CD:C9:36:74:1B + X509v3 Authority Key Identifier: + keyid:90:4B:D0:8C:63:8F:3A:3E:43:64:4B:EC:9B:DE:80:76:28:85:5D:68 + + X509v3 Subject Alternative Name: + DNS:localhost + Signature Algorithm: sha1WithRSAEncryption + 33:b4:87:0e:2c:71:88:6d:ab:cc:14:c8:3f:1e:8d:e5:ed:26: + 6b:b8:76:93:29:b1:0b:c5:e7:41:6f:14:62:8f:e1:81:bb:02: + 13:5c:b2:34:b7:94:f2:7d:1e:fe:e7:89:0b:2e:56:30:58:eb: + 90:d4:05:5b:18:d9:c5:68:61:c0:f6:f7:1a:0f:14:d8:89:8e: + ee:ec:59:b8:48:96:58:33:2e:98:95:56:c3:02:e9:93:cd:3f: + 4c:0d:b5:b5:b6:6e:6f:95:5f:65:eb:1a:ce:56:20:e2:72:d4: + f7:58:5f:c0:7b:49:5f:ac:6b:01:7b:c8:f0:13:19:03:dd:4e: + 05:55:f9:31:52:ea:45:eb:54:b9:4b:a2:3f:22:c7:11:47:8a: + 94:b4:e9:9e:c0:09:96:72:66:ba:01:d3:f3:00:6e:24:ca:a9: + 6d:8e:7f:0b:a0:fd:f9:c9:4f:3a:36:07:c7:4a:c7:c7:1f:c7: + e0:2d:c3:21:d0:44:68:81:38:af:ce:cb:38:be:db:02:3d:ba: + 62:00:43:94:22:c8:d7:43:cd:db:73:23:9d:28:aa:d6:4c:08: + 45:8f:b5:1d:04:c7:2b:8e:22:12:e6:af:cd:9c:13:db:c9:76: + f4:0c:10:25:fa:5c:46:77:7d:e5:ee:16:b4:f1:24:94:22:06: + 85:40:0c:5f -----BEGIN CERTIFICATE----- -MIICFjCCAX+gAwIBAgIBCjANBgkqhkiG9w0BAQQFADAzMQwwCgYDVQQKEwNOREcx -ETAPBgNVBAsTCFNlY3VyaXR5MRAwDgYDVQQDEwdUZXN0IENBMB4XDTEyMDIwODE2 -MTE1M1oXDTE3MDIwNjE2MTE1M1owNTERMA8GA1UECxMIU2VjdXJpdHkxEjAQBgNV -BAMTCWxvY2FsaG9zdDEMMAoGA1UEChMDTkRHMIGfMA0GCSqGSIb3DQEBAQUAA4GN -ADCBiQKBgQCdhZgzD0xusZqzdphETJPgb4QK/sdDpF8EOT/20bAuyRgGt7papJmc -6UtdgS5b9bGh6sRXx+vSKiTqq1ZFLOjnn3OQKhdrK2VU8XiD5rjuwTuNzser0uba -lTOW5/2yVab+uZ/vw4yxR64+KdyBuVopXV9STuh12Q0JSrXzdH82iQIDAQABozgw -NjAMBgNVHRMBAf8EAjAAMCYGA1UdEQQfMB2CCWxvY2FsaG9zdIIQbG9jYWxob3N0 -LmRvbWFpbjANBgkqhkiG9w0BAQQFAAOBgQBAAQCTkLfgYAjvm63KRXcE8djkYIVQ 
-LleHNrCad/v3zNFK0PPCjIeBSWlI/1bPhJDCpfwpvJLk86DrB97Q3IafU2ml7DkC -93bi3iaDy4jI1uskvlM516iaBQx1DCIa4gesluBAnZFvby8HX9y/A7tn5Ew2vdQJ -upkcCUswsU4MSA== +MIIDejCCAmKgAwIBAgIBATANBgkqhkiG9w0BAQUFADA3MREwDwYDVQQLDAhTZWN1 +cml0eTEUMBIGA1UEAwwLTkRHIFRlc3QgQ0ExDDAKBgNVBAoMA05ERzAeFw0xNTAx +MjExNDQ1MDFaFw0xODAxMjAxNDQ1MDFaMDUxDDAKBgNVBAoTA05ERzERMA8GA1UE +CxMIU2VjdXJpdHkxEjAQBgNVBAMTCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAOMpRfxWLaIhtknGau+z7dMyR3fZhe/lattwi11BTbV2 +9ZZCWveCpbuz4fmsxrdxYfRN3Sj2se9l3FqMR7EXOOGKX0Cwu6CHYady+MGhXzv3 +lLXLw1CE76gT1JL/rzrRMUKQS1hMhEemOqM9wZpDPBD2irWXEbd0qzKSvpr8715F +eDBhZxBjCe9htxxHzGnJ5yePTZczWTO4R4mGTM2kOHzQYO5SyOEq8Tub6XzVr4gz +kZwQY4kBA/smXj9hw7Tw+x+t6NJJji8Wgbuc1qVIkVh9rKwsAory9CKAH4wyW7V3 +0DbpJ5qfMWfVTjKNz85z74iG6TxT5glVAiuGfZGN+1MCAwEAAaOBkjCBjzAJBgNV +HRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZp +Y2F0ZTAdBgNVHQ4EFgQUo3cjtRyYhchtMUAcLyBXzck2dBswHwYDVR0jBBgwFoAU +kEvQjGOPOj5DZEvsm96AdiiFXWgwFAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqG +SIb3DQEBBQUAA4IBAQAztIcOLHGIbavMFMg/Ho3l7SZruHaTKbELxedBbxRij+GB +uwITXLI0t5TyfR7+54kLLlYwWOuQ1AVbGNnFaGHA9vcaDxTYiY7u7Fm4SJZYMy6Y +lVbDAumTzT9MDbW1tm5vlV9l6xrOViDictT3WF/Ae0lfrGsBe8jwExkD3U4FVfkx +UupF61S5S6I/IscRR4qUtOmewAmWcma6AdPzAG4kyqltjn8LoP35yU86NgfHSsfH +H8fgLcMh0ERogTivzss4vtsCPbpiAEOUIsjXQ83bcyOdKKrWTAhFj7UdBMcrjiIS +5q/NnBPbyXb0DBAl+lxGd33l7ha08SSUIgaFQAxf -----END CERTIFICATE----- diff --git a/lib/ndg/httpsclient/test/pki/localhost.key b/lib/ndg/httpsclient/test/pki/localhost.key index 17fe929e1f..6a11b49ece 100644 --- a/lib/ndg/httpsclient/test/pki/localhost.key +++ b/lib/ndg/httpsclient/test/pki/localhost.key @@ -1,15 +1,27 @@ -----BEGIN RSA PRIVATE KEY----- -MIICWwIBAAKBgQCdhZgzD0xusZqzdphETJPgb4QK/sdDpF8EOT/20bAuyRgGt7pa -pJmc6UtdgS5b9bGh6sRXx+vSKiTqq1ZFLOjnn3OQKhdrK2VU8XiD5rjuwTuNzser -0ubalTOW5/2yVab+uZ/vw4yxR64+KdyBuVopXV9STuh12Q0JSrXzdH82iQIDAQAB -AoGAejr+HTDT2FlMd9Gg2e6qGM+voHCO4vgbGsXp0nZnxgYY9K2Al3F+GXoWFxp0 -hLsj+UaY0Jy7art1JfuJ1+e/WTR+0s4c6IbZCy0fHF4i29wUI5lc0zSmtePgITOD -tvgtJ8ji+ESq7sRyXO0Eb8wFJPyLj3efoeBQUl8Om1XMYGECQQDLayMY8dgqZCMK -iRU0wrCgzu/1tNBv1hRwip+rOTiqqL+MAKSYg1XtWSlm2RojiNmBfvPo+7VrXZMu -Nt1cBoOtAkEAxj1TuJRmZMf1QFuvv6DLloMmhilGkFobWysUZW18J8FyM+vI5kvH -TjRp2ZGkSw7Fsl+MUpQdfNOkd7pilJd5zQJAPofWqCpf2tghdXGiVS+sACLc3NkS -Ye6bJeVXI9lZNAzfpPfloQRue6G2+miuglHlGsudyvblU/XV8pTnAwz1mQJACyu3 -hQYvwuwVoNvJyoWYE1IuoI7A4C+DrR5/VrvVrDPVaKGXv4pzn6+Ka20ukeAyObvy -n1CjXL5cXTbOiUsD3QJAPe8Rw/Nu3o76tZfWB3irvjZ/mUDPhEppSis5oJY/exoB -O96/99UXZNwSbDII0gjBPN2pd2kf/Ik3EQlxiryZuw== +MIIEpAIBAAKCAQEA4ylF/FYtoiG2ScZq77Pt0zJHd9mF7+Vq23CLXUFNtXb1lkJa +94Klu7Ph+azGt3Fh9E3dKPax72XcWoxHsRc44YpfQLC7oIdhp3L4waFfO/eUtcvD +UITvqBPUkv+vOtExQpBLWEyER6Y6oz3BmkM8EPaKtZcRt3SrMpK+mvzvXkV4MGFn +EGMJ72G3HEfMacnnJ49NlzNZM7hHiYZMzaQ4fNBg7lLI4SrxO5vpfNWviDORnBBj +iQED+yZeP2HDtPD7H63o0kmOLxaBu5zWpUiRWH2srCwCivL0IoAfjDJbtXfQNukn +mp8xZ9VOMo3PznPviIbpPFPmCVUCK4Z9kY37UwIDAQABAoIBAF29Fmg+l64kAzkG +a/JmaWmRgfRvCton7aSIGLpuzEZpxIYw86DXhNSkMkf5iDcCzs0lpHHW+y4y9m9X +G+50CsDnfM8RHxvrQdz7kLM2iDoSvIcYgoyjjtHo/Pt8Dy9SS+WP7ceOK7f1XJUo +Us/5lrvZQPwWTvVJa6v+6jDC13Qqp34qNXCBZvU2WJpjM8Yau3C6ixP2ifJMmoOV +5BT7bUPwn9QT49PFDLSbKKUnvr8ClhXF2hF2B4ztm3SIjhMe7kwuU+i8yWlFiGT+ +RzSvKGGA7QtDeww5vrMEpaudQaU0MvcKbsolk/MDh0Kcy3fKKz1OSZEvvZ1hCzlr +4flLOOECgYEA+YG32ohJ5QZnaQ+DXOl/QSzAlL5ZZeSSny25tkJetLKthTcSfHNH ++gWRFujONATuA7EmgkzNC+d+3JjYJIcVODb/22yoAVQFVXvz+Ihe5UyM14r/ZV/b +4w/dLvLpWnw17BaqDwl216xnNXa/Y4IzTXwgw2G8noTKlby6You0NMcCgYEA6RKu +95+y1xMIVdfMEExRcwUDFxFA9f1WFArq2D/GYiJhOQtjXJm5nQpsiczcF0xHvXjA 
+6YiwFBF8L6d77ZlYb1AoKeE2me/wtRqaZtAGqnzqS7fx06hgFD8FAGxtHYXW2Ozj +rKYEb3Xqkpko+XzuLIXaXSP/TcE2PuWMRa9IIRUCgYBNYx2KS3FX9seGP4iH/eg5 +Z88Hk46Y2u9bLcyHKkjDlRPa2L0EGqF9e9KHn4etfMXyITUHfxiuE4w2kbWghsFf +ITf0b9wgJVZOMFb4hBui1T5t8C/M2pGR+K6qzC7yoMn8wv7WESJqPI/6di1giNau +tsxWrW7aX+eRz+qjfB9VqwKBgQCOfEaMyYuqok8JM7jkCdQNTfYapwigmbSC4j25 +4BsmqT/ogMbIuI3ZrKK4B45Ri+ebtHOzEUYbrqjN9UT09zcyLb2wBKe9qgrsnIvh +6LD6jw0pJxXmwFukZPZo0OBQGR9EVGXHiWLSxTKXVpzPEQoGG/pn0HbmkQTZpLmB +bGvbFQKBgQDKNboMRllLx6adl5vI8aEvX5M4BI/6RIFuMI+6glO8mf2MrC46w0a7 +jo/Z5G2GLfAZ2GXUW3PoWxWYGjxyX12LvOg+R137uzD/itqij9IRgv91X+Go27Co +ch20cYyr3Sblp2hMH9gDL+4fvtKGZKc1Bm+uI3wO61RRBl0JEYT3Ww== -----END RSA PRIVATE KEY----- diff --git a/lib/ndg/httpsclient/test/test_https.py b/lib/ndg/httpsclient/test/test_https.py index 8400e807a4..86e0c51ed2 100644 --- a/lib/ndg/httpsclient/test/test_https.py +++ b/lib/ndg/httpsclient/test/test_https.py @@ -35,10 +35,10 @@ def test01_open(self): def test02_open_fails(self): conn = HTTPSConnection(Constants.HOSTNAME, port=Constants.PORT2) - self.failUnlessRaises(socket.error, conn.connect) + self.assertRaises(socket.error, conn.connect) def test03_ssl_verification_of_peer_fails(self): - ctx = SSL.Context(SSL.SSLv3_METHOD) + ctx = SSL.Context(SSL.TLSv1_METHOD) def verify_callback(conn, x509, errnum, errdepth, preverify_ok): log.debug('SSL peer certificate verification failed for %r', @@ -54,10 +54,10 @@ def verify_callback(conn, x509, errnum, errdepth, preverify_ok): conn = HTTPSConnection(Constants.HOSTNAME, port=Constants.PORT, ssl_context=ctx) conn.connect() - self.failUnlessRaises(SSL.Error, conn.request, 'GET', '/') + self.assertRaises(SSL.Error, conn.request, 'GET', '/') def test03_ssl_verification_of_peer_succeeds(self): - ctx = SSL.Context(SSL.SSLv3_METHOD) + ctx = SSL.Context(SSL.TLSv1_METHOD) verify_callback = lambda conn, x509, errnum, errdepth, preverify_ok: \ preverify_ok @@ -76,10 +76,11 @@ def test03_ssl_verification_of_peer_succeeds(self): print('Response = %s' % resp.read()) def test04_ssl_verification_with_subj_alt_name(self): - ctx = SSL.Context(SSL.SSLv3_METHOD) + ctx = SSL.Context(SSL.TLSv1_METHOD) + + verification = ServerSSLCertVerification(hostname='localhost') + verify_callback = verification.get_verify_server_cert_func() - verify_callback = ServerSSLCertVerification(hostname='localhost') - ctx.set_verify(SSL.VERIFY_PEER, verify_callback) ctx.set_verify_depth(9) @@ -94,13 +95,15 @@ def test04_ssl_verification_with_subj_alt_name(self): print('Response = %s' % resp.read()) def test04_ssl_verification_with_subj_common_name(self): - ctx = SSL.Context(SSL.SSLv3_METHOD) + ctx = SSL.Context(SSL.TLSv1_METHOD) # Explicitly set verification of peer hostname using peer certificate # subject common name - verify_callback = ServerSSLCertVerification(hostname='localhost', - subj_alt_name_match=False) + verification = ServerSSLCertVerification(hostname='localhost', + subj_alt_name_match=False) + verify_callback = verification.get_verify_server_cert_func() + ctx.set_verify(SSL.VERIFY_PEER, verify_callback) ctx.set_verify_depth(9) diff --git a/lib/ndg/httpsclient/test/test_urllib2.py b/lib/ndg/httpsclient/test/test_urllib2.py index 9c1ef8df73..366c2ca400 100644 --- a/lib/ndg/httpsclient/test/test_urllib2.py +++ b/lib/ndg/httpsclient/test/test_urllib2.py @@ -9,7 +9,13 @@ __license__ = "BSD - see LICENSE file in top-level directory" __contact__ = "Philip.Kershaw@stfc.ac.uk" __revision__ = '$Id$' -from urllib2 import URLError +import sys + +if sys.version_info[0] > 2: + from urllib.error import 
URLError as URLError_ +else: + from urllib2 import URLError as URLError_ + import unittest from OpenSSL import SSL @@ -22,28 +28,28 @@ class Urllib2TestCase(unittest.TestCase): def test01_urllib2_build_opener(self): opener = build_opener() - self.assert_(opener) + self.assertTrue(opener) def test02_open(self): opener = build_opener() res = opener.open(Constants.TEST_URI) - self.assert_(res) + self.assertTrue(res) print("res = %s" % res.read()) def test03_open_fails_unknown_loc(self): opener = build_opener() - self.failUnlessRaises(URLError, opener.open, Constants.TEST_URI2) + self.assertRaises(URLError_, opener.open, Constants.TEST_URI2) def test04_open_peer_cert_verification_fails(self): # Explicitly set empty CA directory to make verification fail - ctx = SSL.Context(SSL.SSLv3_METHOD) + ctx = SSL.Context(SSL.TLSv1_METHOD) verify_callback = lambda conn, x509, errnum, errdepth, preverify_ok: \ preverify_ok ctx.set_verify(SSL.VERIFY_PEER, verify_callback) ctx.load_verify_locations(None, './') opener = build_opener(ssl_context=ctx) - self.failUnlessRaises(SSL.Error, opener.open, Constants.TEST_URI) + self.assertRaises(SSL.Error, opener.open, Constants.TEST_URI) if __name__ == "__main__": diff --git a/lib/ndg/httpsclient/test/test_utils.py b/lib/ndg/httpsclient/test/test_utils.py index fe496a6905..44382d04a5 100644 --- a/lib/ndg/httpsclient/test/test_utils.py +++ b/lib/ndg/httpsclient/test/test_utils.py @@ -23,17 +23,17 @@ class TestUtilsModule(unittest.TestCase): '''Test ndg.httpsclient.utils module''' def test01_configuration(self): - config = Configuration(SSL.Context(SSL.SSLv3_METHOD), True) - self.assert_(config.ssl_context) - self.assertEquals(config.debug, True) + config = Configuration(SSL.Context(SSL.TLSv1_METHOD), True) + self.assertTrue(config.ssl_context) + self.assertEqual(config.debug, True) def test02_fetch_from_url(self): - config = Configuration(SSL.Context(SSL.SSLv3_METHOD), True) + config = Configuration(SSL.Context(SSL.TLSv1_METHOD), True) res = fetch_from_url(Constants.TEST_URI, config) - self.assert_(res) + self.assertTrue(res) def test03_open_url(self): - config = Configuration(SSL.Context(SSL.SSLv3_METHOD), True) + config = Configuration(SSL.Context(SSL.TLSv1_METHOD), True) res = open_url(Constants.TEST_URI, config) self.assertEqual(res[0], 200, 'open_url for %r failed' % Constants.TEST_URI) @@ -57,5 +57,6 @@ def test04__should_use_proxy(self): else: del os.environ['no_proxy'] + if __name__ == "__main__": unittest.main() \ No newline at end of file diff --git a/lib/ndg/httpsclient/urllib2_build_opener.py b/lib/ndg/httpsclient/urllib2_build_opener.py index 106f875324..55d8632698 100644 --- a/lib/ndg/httpsclient/urllib2_build_opener.py +++ b/lib/ndg/httpsclient/urllib2_build_opener.py @@ -8,9 +8,23 @@ __contact__ = "Philip.Kershaw@stfc.ac.uk" __revision__ = '$Id$' import logging -from urllib2 import (ProxyHandler, UnknownHandler, HTTPDefaultErrorHandler, - FTPHandler, FileHandler, HTTPErrorProcessor, HTTPHandler, - OpenerDirector, HTTPRedirectHandler) +import sys + +# Py 2 <=> 3 compatibility for class type checking +if sys.version_info[0] > 2: + class_type_ = type + from urllib.request import (ProxyHandler, UnknownHandler, + HTTPDefaultErrorHandler, FTPHandler, + FileHandler, HTTPErrorProcessor, + HTTPHandler, OpenerDirector, + HTTPRedirectHandler) +else: + import types + class_type_ = types.ClassType + + from urllib2 import (ProxyHandler, UnknownHandler, HTTPDefaultErrorHandler, + FTPHandler, FileHandler, HTTPErrorProcessor, + HTTPHandler, OpenerDirector, 
HTTPRedirectHandler) from ndg.httpsclient.https import HTTPSContextHandler @@ -27,9 +41,8 @@ def build_opener(*handlers, **kw): If any of the handlers passed as arguments are subclasses of the default handlers, the default handlers will not be used. """ - import types def isclass(obj): - return isinstance(obj, types.ClassType) or hasattr(obj, "__bases__") + return isinstance(obj, class_type_) or hasattr(obj, "__bases__") opener = OpenerDirector() default_classes = [ProxyHandler, UnknownHandler, HTTPHandler, diff --git a/lib/ndg/httpsclient/utils.py b/lib/ndg/httpsclient/utils.py index 5ad02175bc..d55d8f6fc0 100644 --- a/lib/ndg/httpsclient/utils.py +++ b/lib/ndg/httpsclient/utils.py @@ -8,16 +8,35 @@ __contact__ = "Philip.Kershaw@stfc.ac.uk" __revision__ = '$Id$' -import cookielib -import httplib import logging from optparse import OptionParser import os -import urllib2 -from urllib2 import (HTTPHandler, HTTPCookieProcessor, - HTTPBasicAuthHandler, HTTPPasswordMgrWithDefaultRealm) - -import urlparse +import sys + +if sys.version_info[0] > 2: + import http.cookiejar as cookiejar_ + import http.client as http_client_ + from urllib.request import Request as Request_ + from urllib.request import HTTPHandler as HTTPHandler_ + from urllib.request import HTTPCookieProcessor as HTTPCookieProcessor_ + from urllib.request import HTTPBasicAuthHandler as HTTPBasicAuthHandler_ + from urllib.request import HTTPPasswordMgrWithDefaultRealm as \ + HTTPPasswordMgrWithDefaultRealm_ + from urllib.request import ProxyHandler as ProxyHandler_ + from urllib.error import HTTPError as HTTPError_ + import urllib.parse as urlparse_ +else: + import cookielib as cookiejar_ + import httplib as http_client_ + from urllib2 import Request as Request_ + from urllib2 import HTTPHandler as HTTPHandler_ + from urllib2 import HTTPCookieProcessor as HTTPCookieProcessor_ + from urllib2 import HTTPBasicAuthHandler as HTTPBasicAuthHandler_ + from urllib2 import HTTPPasswordMgrWithDefaultRealm as \ + HTTPPasswordMgrWithDefaultRealm_ + from urllib2 import ProxyHandler as ProxyHandler_ + from urllib2 import HTTPError as HTTPError_ + import urlparse as urlparse_ from ndg.httpsclient.urllib2_build_opener import build_opener from ndg.httpsclient.https import HTTPSContextHandler @@ -25,7 +44,7 @@ log = logging.getLogger(__name__) -class AccumulatingHTTPCookieProcessor(HTTPCookieProcessor): +class AccumulatingHTTPCookieProcessor(HTTPCookieProcessor_): """Cookie processor that adds new cookies (instead of replacing the existing ones as HTTPCookieProcessor does) """ @@ -37,7 +56,7 @@ def http_request(self, request): @rtype: urllib2.Request """ COOKIE_HEADER_NAME = "Cookie" - tmp_request = urllib2.Request(request.get_full_url(), request.data, {}, + tmp_request = Request_(request.get_full_url(), request.data, {}, request.origin_req_host, request.unverifiable) self.cookiejar.add_cookie_header(tmp_request) @@ -73,7 +92,7 @@ def fetch_from_url(url, config, data=None, handlers=None): """ return_code, return_message, response = open_url(url, config, data=data, handlers=handlers) - if return_code and return_code == httplib.OK: + if return_code and return_code == http_client_.OK: return_data = response.read() response.close() return return_data @@ -95,14 +114,14 @@ def fetch_from_url_to_file(url, config, output_file, data=None, handlers=None): """ return_code, return_message, response = open_url(url, config, data=data, handlers=handlers) - if return_code == httplib.OK: + if return_code == http_client_.OK: return_data = response.read() 
response.close() outfile = open(output_file, "w") outfile.write(return_data) outfile.close() - return return_code, return_message, return_code == httplib.OK + return return_code, return_message, return_code == http_client_.OK def fetch_stream_from_url(url, config, data=None, handlers=None): @@ -120,7 +139,7 @@ def fetch_stream_from_url(url, config, data=None, handlers=None): """ return_code, return_message, response = open_url(url, config, data=data, handlers=handlers) - if return_code and return_code == httplib.OK: + if return_code and return_code == http_client_.OK: return response else: raise URLFetchError(return_message) @@ -146,7 +165,7 @@ def open_url(url, config, data=None, handlers=None): if config.cookie: cj = config.cookie else: - cj = cookielib.CookieJar() + cj = cookiejar_.CookieJar() # Use a cookie processor that accumulates cookies when redirects occur so # that an application can redirect for authentication and retain both any @@ -160,17 +179,17 @@ def open_url(url, config, data=None, handlers=None): handlers.append(cookie_handler) if config.debug: - http_handler = HTTPHandler(debuglevel=debuglevel) + http_handler = HTTPHandler_(debuglevel=debuglevel) https_handler = HTTPSContextHandler(config.ssl_context, debuglevel=debuglevel) handlers.extend([http_handler, https_handler]) if config.http_basicauth: # currently only supports http basic auth - auth_handler = HTTPBasicAuthHandler(HTTPPasswordMgrWithDefaultRealm()) + auth_handler = HTTPBasicAuthHandler_(HTTPPasswordMgrWithDefaultRealm_()) auth_handler.add_password(realm=None, uri=url, - user=config.httpauth[0], - passwd=config.httpauth[1]) + user=config.http_basicauth[0], + passwd=config.http_basicauth[1]) handlers.append(auth_handler) @@ -179,10 +198,10 @@ def open_url(url, config, data=None, handlers=None): # set via http_proxy and https_proxy, but does not take the no_proxy value # into account. if not _should_use_proxy(url, config.no_proxy): - handlers.append(urllib2.ProxyHandler({})) + handlers.append(ProxyHandler_({})) log.debug("Not using proxy") elif config.proxies: - handlers.append(urllib2.ProxyHandler(config.proxies)) + handlers.append(ProxyHandler_(config.proxies)) log.debug("Configuring proxies: %s" % config.proxies) opener = build_opener(*handlers, ssl_context=config.ssl_context) @@ -191,12 +210,16 @@ def open_url(url, config, data=None, handlers=None): if headers is None: headers = {} - request = urllib2.Request(url, data, headers) + request = Request_(url, data, headers) # Open the URL and check the response. 
return_code = 0 return_message = '' response = None + + # FIXME + response = opener.open(request) + try: response = opener.open(request) return_message = response.msg @@ -205,13 +228,13 @@ def open_url(url, config, data=None, handlers=None): for index, cookie in enumerate(cj): log.debug("%s : %s", index, cookie) - except urllib2.HTTPError, exc: + except HTTPError_ as exc: return_code = exc.code return_message = "Error: %s" % exc.msg if log.isEnabledFor(logging.DEBUG): log.debug("%s %s", exc.code, exc.msg) - except Exception, exc: + except Exception as exc: return_message = "Error: %s" % exc.__str__() if log.isEnabledFor(logging.DEBUG): import traceback @@ -231,7 +254,7 @@ def _should_use_proxy(url, no_proxy=None): else: no_proxy_effective = no_proxy - urlObj = urlparse.urlparse(_url_as_string(url)) + urlObj = urlparse_.urlparse(_url_as_string(url)) for np in [h.strip() for h in no_proxy_effective.split(',')]: if urlObj.hostname == np: return False @@ -246,13 +269,13 @@ def _url_as_string(url): @return: URL string @rtype: basestring """ - if isinstance(url, urllib2.Request): + if isinstance(url, Request_): return url.get_full_url() - elif isinstance(url, basestring): + elif isinstance(url, str): return url else: raise TypeError("Expected type %r or %r" % - (basestring, urllib2.Request)) + (str, Request_)) class Configuration(object): @@ -270,7 +293,7 @@ def __init__(self, ssl_context, debug=False, proxies=None, no_proxy=None, @param no_proxy: hosts for which a proxy should not be used @type no_proxy: basestring @param cookie: cookies to set for request - @type cookie: cookielib.CookieJar + @type cookie: cookielib.CookieJar (python 3 - http.cookiejar) @param http_basicauth: http authentication, or None @type http_basicauth: tuple of (username,password) @param headers: http headers diff --git a/lib/oauth2/__init__.py b/lib/oauth2/__init__.py index a9714fe43e..a1776a754d 100644 --- a/lib/oauth2/__init__.py +++ b/lib/oauth2/__init__.py @@ -1,7 +1,7 @@ """ The MIT License -Copyright (c) 2007 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel +Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -22,21 +22,31 @@ THE SOFTWARE. """ -import urllib +import base64 +from hashlib import sha1 import time import random -import urlparse import hmac import binascii -import httplib2 as httplib2 - -try: - from urlparse import parse_qs, parse_qsl -except ImportError: - from cgi import parse_qs, parse_qsl - - -VERSION = '1.0' # Hi Blaine! +import httplib2 + +from ._compat import PY3 +from ._compat import b +from ._compat import parse_qs +from ._compat import quote +from ._compat import STRING_TYPES +from ._compat import TEXT +from ._compat import u +from ._compat import unquote +from ._compat import unquote_to_bytes +from ._compat import urlencode +from ._compat import urlsplit +from ._compat import urlunsplit +from ._compat import urlparse +from ._compat import urlunparse +from ._version import __version__ + +OAUTH_VERSION = '1.0' # Hi Blaine! 
HTTP_METHOD = 'GET' SIGNATURE_METHOD = 'PLAINTEXT' @@ -44,7 +54,7 @@ class Error(RuntimeError): """Generic exception class.""" - def __init__(self, message='OAuth error occured.'): + def __init__(self, message='OAuth error occurred.'): self._message = message @property @@ -55,18 +65,103 @@ def message(self): def __str__(self): return self._message + class MissingSignature(Error): pass + def build_authenticate_header(realm=''): """Optional WWW-Authenticate header (401 error)""" return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} +def build_xoauth_string(url, consumer, token=None): + """Build an XOAUTH string for use in SMTP/IMPA authentication.""" + request = Request.from_consumer_and_token(consumer, token, + "GET", url) + + signing_method = SignatureMethod_HMAC_SHA1() + request.sign_request(signing_method, consumer, token) + + params = [] + for k, v in sorted(request.items()): + if v is not None: + params.append('%s="%s"' % (k, escape(v))) + + return "%s %s %s" % ("GET", url, ','.join(params)) + + +def to_unicode(s): + """ Convert to unicode, raise exception with instructive error + message if s is not unicode, ascii, or utf-8. """ + if not isinstance(s, TEXT): + if not isinstance(s, bytes): + raise TypeError('You are required to pass either unicode or ' + 'bytes here, not: %r (%s)' % (type(s), s)) + try: + s = s.decode('utf-8') + except UnicodeDecodeError as le: + raise TypeError('You are required to pass either a unicode ' + 'object or a utf-8-enccoded bytes string here. ' + 'You passed a bytes object which contained ' + 'non-utf-8: %r. The UnicodeDecodeError that ' + 'resulted from attempting to interpret it as ' + 'utf-8 was: %s' + % (s, le,)) + return s + +def to_utf8(s): + return to_unicode(s).encode('utf-8') + +def to_unicode_if_string(s): + if isinstance(s, STRING_TYPES): + return to_unicode(s) + else: + return s + +def to_utf8_if_string(s): + if isinstance(s, STRING_TYPES): + return to_utf8(s) + else: + return s + +def to_unicode_optional_iterator(x): + """ + Raise TypeError if x is a str containing non-utf8 bytes or if x is + an iterable which contains such a str. + """ + if isinstance(x, STRING_TYPES): + return to_unicode(x) + + try: + l = list(x) + except TypeError as e: + assert 'is not iterable' in str(e) + return x + else: + return [ to_unicode(e) for e in l ] + +def to_utf8_optional_iterator(x): + """ + Raise TypeError if x is a str or if x is an iterable which + contains a str. 
+ """ + if isinstance(x, STRING_TYPES): + return to_utf8(x) + + try: + l = list(x) + except TypeError as e: + assert 'is not iterable' in str(e) + return x + else: + return [ to_utf8_if_string(e) for e in l ] + def escape(s): """Escape a URL including any /.""" - return urllib.quote(s, safe='~') - + if not isinstance(s, bytes): + s = s.encode('utf-8') + return quote(s, safe='~') def generate_timestamp(): """Get seconds since epoch (UTC).""" @@ -75,12 +170,12 @@ def generate_timestamp(): def generate_nonce(length=8): """Generate pseudorandom number.""" - return ''.join([str(random.randint(0, 9)) for i in range(length)]) + return ''.join([str(random.SystemRandom().randint(0, 9)) for i in range(length)]) def generate_verifier(length=8): """Generate pseudorandom number.""" - return ''.join([str(random.randint(0, 9)) for i in range(length)]) + return ''.join([str(random.SystemRandom().randint(0, 9)) for i in range(length)]) class Consumer(object): @@ -114,12 +209,10 @@ def __init__(self, key, secret): raise ValueError("Key and secret must be set.") def __str__(self): - data = { - 'oauth_consumer_key': self.key, - 'oauth_consumer_secret': self.secret - } + data = {'oauth_consumer_key': self.key, + 'oauth_consumer_secret': self.secret} - return urllib.urlencode(data) + return urlencode(data) class Token(object): @@ -163,13 +256,13 @@ def set_verifier(self, verifier=None): def get_callback_url(self): if self.callback and self.verifier: # Append the oauth_verifier. - parts = urlparse.urlparse(self.callback) + parts = urlparse(self.callback) scheme, netloc, path, params, query, fragment = parts[:6] if query: query = '%s&oauth_verifier=%s' % (query, self.verifier) else: query = 'oauth_verifier=%s' % self.verifier - return urlparse.urlunparse((scheme, netloc, path, params, + return urlunparse((scheme, netloc, path, params, query, fragment)) return self.callback @@ -179,15 +272,14 @@ def to_string(self): The resulting string includes the token's secret, so you should never send or store this string where a third party can read it. """ - - data = { - 'oauth_token': self.key, - 'oauth_token_secret': self.secret, - } + items = [ + ('oauth_token', self.key), + ('oauth_token_secret', self.secret), + ] if self.callback_confirmed is not None: - data['oauth_callback_confirmed'] = self.callback_confirmed - return urllib.urlencode(data) + items.append(('oauth_callback_confirmed', self.callback_confirmed)) + return urlencode(items) @staticmethod def from_string(s): @@ -197,7 +289,7 @@ def from_string(s): if not len(s): raise ValueError("Invalid parameter string.") - params = parse_qs(s, keep_blank_values=False) + params = parse_qs(u(s), keep_blank_values=False) if not len(params): raise ValueError("Invalid parameter string.") @@ -216,7 +308,7 @@ def from_string(s): try: token.callback_confirmed = params['oauth_callback_confirmed'][0] except KeyError: - pass # 1.0, no callback confirmed. + pass # 1.0, no callback confirmed. 
return token def __str__(self): @@ -250,36 +342,40 @@ class Request(dict): """ - http_method = HTTP_METHOD - http_url = None - version = VERSION - - def __init__(self, method=HTTP_METHOD, url=None, parameters=None): - if method is not None: - self.method = method - + version = OAUTH_VERSION + + def __init__(self, method=HTTP_METHOD, url=None, parameters=None, + body=b'', is_form_encoded=False): if url is not None: - self.url = url - + self.url = to_unicode(url) + self.method = method if parameters is not None: - self.update(parameters) - + for k, v in parameters.items(): + k = to_unicode(k) + v = to_unicode_optional_iterator(v) + self[k] = v + self.body = body + self.is_form_encoded = is_form_encoded + @setter def url(self, value): - parts = urlparse.urlparse(value) - scheme, netloc, path = parts[:3] - - # Exclude default port numbers. - if scheme == 'http' and netloc[-3:] == ':80': - netloc = netloc[:-3] - elif scheme == 'https' and netloc[-4:] == ':443': - netloc = netloc[:-4] - - if scheme != 'http' and scheme != 'https': - raise ValueError("Unsupported URL %s (%s)." % (value, scheme)) - - value = '%s://%s%s' % (scheme, netloc, path) self.__dict__['url'] = value + if value is not None: + scheme, netloc, path, query, fragment = urlsplit(value) + + # Exclude default port numbers. + if scheme == 'http' and netloc[-3:] == ':80': + netloc = netloc[:-3] + elif scheme == 'https' and netloc[-4:] == ':443': + netloc = netloc[:-4] + if scheme not in ('http', 'https'): + raise ValueError("Unsupported URL %s (%s)." % (value, scheme)) + + # Normalized URL excludes params, query, and fragment. + self.normalized_url = urlunsplit((scheme, netloc, path, None, None)) + else: + self.normalized_url = None + self.__dict__['url'] = None @setter def method(self, value): @@ -290,14 +386,14 @@ def _get_timestamp_nonce(self): def get_nonoauth_parameters(self): """Get any non-OAuth parameters.""" - return dict([(k, v) for k, v in self.iteritems() + return dict([(k, v) for k, v in self.items() if not k.startswith('oauth_')]) def to_header(self, realm=''): """Serialize as a header for an HTTPAuth request.""" oauth_params = ((k, v) for k, v in self.items() if k.startswith('oauth_')) - stringy_params = ((k, escape(str(v))) for k, v in oauth_params) + stringy_params = ((k, escape(v)) for k, v in oauth_params) header_params = ('%s="%s"' % (k, v) for k, v in stringy_params) params_header = ', '.join(header_params) @@ -309,17 +405,40 @@ def to_header(self, realm=''): def to_postdata(self): """Serialize as post data for a POST request.""" - return self.encode_postdata(self) + items = [] + for k, v in sorted(self.items()): # predictable for testing + items.append((k.encode('utf-8'), to_utf8_optional_iterator(v))) - def encode_postdata(self, data): # tell urlencode to deal with sequence values and map them correctly # to resulting querystring. 
for example self["k"] = ["v1", "v2"] will # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D - return urllib.urlencode(data, True) - + return urlencode(items, True).replace('+', '%20') + def to_url(self): """Serialize as a URL for a GET request.""" - return '%s?%s' % (self.url, self.to_postdata()) + base_url = urlparse(self.url) + + if PY3: + query = parse_qs(base_url.query) + for k, v in self.items(): + query.setdefault(k, []).append(to_utf8_optional_iterator(v)) + scheme = base_url.scheme + netloc = base_url.netloc + path = base_url.path + params = base_url.params + fragment = base_url.fragment + else: + query = parse_qs(to_utf8(base_url.query)) + for k, v in self.items(): + query.setdefault(to_utf8(k), []).append(to_utf8_optional_iterator(v)) + scheme = to_utf8(base_url.scheme) + netloc = to_utf8(base_url.netloc) + path = to_utf8(base_url.path) + params = to_utf8(base_url.params) + fragment = to_utf8(base_url.fragment) + + url = (scheme, netloc, path, params, urlencode(query, True), fragment) + return urlunparse(url) def get_parameter(self, parameter): ret = self.get(parameter) @@ -327,20 +446,52 @@ def get_parameter(self, parameter): raise Error('Parameter not found: %s' % parameter) return ret - + def get_normalized_parameters(self): """Return a string that contains the parameters that must be signed.""" - items = [(k, v) for k, v in self.items() if k != 'oauth_signature'] - encoded_str = urllib.urlencode(sorted(items), True) + items = [] + for key, value in self.items(): + if key == 'oauth_signature': + continue + # 1.0a/9.1.1 states that kvp must be sorted by key, then by value, + # so we unpack sequence values into multiple items for sorting. + if isinstance(value, STRING_TYPES): + items.append((to_utf8_if_string(key), to_utf8(value))) + else: + try: + value = list(value) + except TypeError as e: + assert 'is not iterable' in str(e) + items.append((to_utf8_if_string(key), to_utf8_if_string(value))) + else: + items.extend((to_utf8_if_string(key), to_utf8_if_string(item)) for item in value) + + # Include any query string parameters from the provided URL + query = urlparse(self.url)[4] + + url_items = self._split_url_string(query).items() + url_items = [(to_utf8(k), to_utf8_optional_iterator(v)) for k, v in url_items if k != 'oauth_signature' ] + items.extend(url_items) + + items.sort() + encoded_str = urlencode(items, True) # Encode signature parameters per Oauth Core 1.0 protocol # spec draft 7, section 3.6 # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6) # Spaces must be encoded with "%20" instead of "+" - return encoded_str.replace('+', '%20') - + return encoded_str.replace('+', '%20').replace('%7E', '~') + def sign_request(self, signature_method, consumer, token): """Set the signature parameter to the result of sign.""" + if not self.is_form_encoded: + # according to + # http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html + # section 4.1.1 "OAuth Consumers MUST NOT include an + # oauth_body_hash parameter on requests with form-encoded + # request bodies." 
+ self['oauth_body_hash'] = base64.b64encode(sha1(self.body).digest()) + if 'oauth_consumer_key' not in self: self['oauth_consumer_key'] = consumer.key @@ -358,7 +509,7 @@ def make_timestamp(cls): @classmethod def make_nonce(cls): """Generate pseudorandom number.""" - return str(random.randint(0, 100000000)) + return str(random.SystemRandom().randint(0, 100000000)) @classmethod def from_request(cls, http_method, http_url, headers=None, parameters=None, @@ -368,10 +519,15 @@ def from_request(cls, http_method, http_url, headers=None, parameters=None, parameters = {} # Headers - if headers and 'Authorization' in headers: - auth_header = headers['Authorization'] + if headers: + auth_header = None + for k, v in headers.items(): + if k.lower() == 'authorization' or \ + k.upper() == 'HTTP_AUTHORIZATION': + auth_header = v + # Check that the authorization header is OAuth. - if auth_header[:6] == 'OAuth ': + if auth_header and auth_header[:6] == 'OAuth ': auth_header = auth_header[6:] try: # Get the parameters from the header. @@ -387,7 +543,7 @@ def from_request(cls, http_method, http_url, headers=None, parameters=None, parameters.update(query_params) # URL parameters. - param_str = urlparse.urlparse(http_url)[4] # query + param_str = urlparse(http_url)[4] # query url_params = cls._split_url_string(param_str) parameters.update(url_params) @@ -398,7 +554,8 @@ def from_request(cls, http_method, http_url, headers=None, parameters=None, @classmethod def from_consumer_and_token(cls, consumer, token=None, - http_method=HTTP_METHOD, http_url=None, parameters=None): + http_method=HTTP_METHOD, http_url=None, parameters=None, + body=b'', is_form_encoded=False): if not parameters: parameters = {} @@ -414,8 +571,11 @@ def from_consumer_and_token(cls, consumer, token=None, if token: parameters['oauth_token'] = token.key + if token.verifier: + parameters['oauth_verifier'] = token.verifier - return Request(http_method, http_url, parameters) + return cls(http_method, http_url, parameters, body=body, + is_form_encoded=is_form_encoded) @classmethod def from_token_and_callback(cls, token, callback=None, @@ -445,18 +605,88 @@ def _split_header(header): # Split key-value. param_parts = param.split('=', 1) # Remove quotes and unescape the value. - params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"')) + params[param_parts[0]] = unquote(param_parts[1].strip('\"')) return params @staticmethod def _split_url_string(param_str): """Turn URL string into parameters.""" - parameters = parse_qs(param_str, keep_blank_values=False) - for k, v in parameters.iteritems(): - parameters[k] = urllib.unquote(v[0]) + if not PY3: + # If passed unicode with quoted UTF8, Python2's parse_qs leaves + # mojibake'd uniocde after unquoting, so encode first. 
+ param_str = b(param_str, 'utf-8') + parameters = parse_qs(param_str, keep_blank_values=True) + for k, v in parameters.items(): + if len(v) == 1: + parameters[k] = unquote(v[0]) + else: + parameters[k] = sorted([unquote(s) for s in v]) return parameters +class Client(httplib2.Http): + """OAuthClient is a worker to attempt to execute a request.""" + + def __init__(self, consumer, token=None, **kwargs): + + if consumer is not None and not isinstance(consumer, Consumer): + raise ValueError("Invalid consumer.") + + if token is not None and not isinstance(token, Token): + raise ValueError("Invalid token.") + + self.consumer = consumer + self.token = token + self.method = SignatureMethod_HMAC_SHA1() + + super(Client, self).__init__(**kwargs) + + def set_signature_method(self, method): + if not isinstance(method, SignatureMethod): + raise ValueError("Invalid signature method.") + + self.method = method + + def request(self, uri, method="GET", body=b'', headers=None, + redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None): + DEFAULT_POST_CONTENT_TYPE = 'application/x-www-form-urlencoded' + + if not isinstance(headers, dict): + headers = {} + + if method == "POST": + headers['Content-Type'] = headers.get('Content-Type', + DEFAULT_POST_CONTENT_TYPE) + + is_form_encoded = \ + headers.get('Content-Type') == 'application/x-www-form-urlencoded' + + if is_form_encoded and body: + parameters = parse_qs(body) + else: + parameters = None + + req = Request.from_consumer_and_token(self.consumer, + token=self.token, http_method=method, http_url=uri, + parameters=parameters, body=body, is_form_encoded=is_form_encoded) + + req.sign_request(self.method, self.consumer, self.token) + + scheme, netloc, path, params, query, fragment = urlparse(uri) + realm = urlunparse((scheme, netloc, '', None, None, None)) + + if is_form_encoded: + body = req.to_postdata() + elif method == "GET": + uri = req.to_url() + else: + headers.update(req.to_header(realm=realm)) + + return httplib2.Http.request(self, uri, method=method, body=body, + headers=headers, redirections=redirections, + connection_type=connection_type) + + class Server(object): """A skeletal implementation of a service provider, providing protected resources to requests from authorized consumers. @@ -467,7 +697,7 @@ class Server(object): """ timestamp_threshold = 300 # In seconds, five minutes. - version = VERSION + version = OAUTH_VERSION signature_methods = None def __init__(self, signature_methods=None): @@ -480,7 +710,7 @@ def add_signature_method(self, signature_method): def verify_request(self, request, consumer, token): """Verifies an api call and checks all the parameters.""" - version = self._get_version(request) + self._check_version(request) self._check_signature(request, consumer, token) parameters = request.get_nonoauth_parameters() return parameters @@ -489,45 +719,43 @@ def build_authenticate_header(self, realm=''): """Optional support for the authenticate header.""" return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} + def _check_version(self, request): + """Verify the correct version of the request for this server.""" + version = self._get_version(request) + if version and version != self.version: + raise Error('OAuth version %s not supported.' 
% str(version)) + def _get_version(self, request): - """Verify the correct version request for this server.""" + """Return the version of the request for this server.""" try: version = request.get_parameter('oauth_version') except: - version = VERSION - - if version and version != self.version: - raise Error('OAuth version %s not supported.' % str(version)) + version = OAUTH_VERSION return version def _get_signature_method(self, request): """Figure out the signature with some defaults.""" - try: - signature_method = request.get_parameter('oauth_signature_method') - except: + signature_method = request.get('oauth_signature_method') + if signature_method is None: signature_method = SIGNATURE_METHOD try: # Get the signature method object. - signature_method = self.signature_methods[signature_method] - except: + return self.signature_methods[signature_method] + except KeyError: signature_method_names = ', '.join(self.signature_methods.keys()) - raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names)) - - return signature_method - - def _get_verifier(self, request): - return request.get_parameter('oauth_verifier') + raise Error('Signature method %s not supported try one of the ' + 'following: %s' + % (signature_method, signature_method_names)) def _check_signature(self, request, consumer, token): timestamp, nonce = request._get_timestamp_nonce() self._check_timestamp(timestamp) signature_method = self._get_signature_method(request) - try: - signature = request.get_parameter('oauth_signature') - except: + signature = request.get('oauth_signature') + if signature is None: raise MissingSignature('Missing oauth_signature.') # Validate the signature. @@ -539,8 +767,6 @@ def _check_signature(self, request, consumer, token): raise Error('Invalid signature. 
Expected signature base ' 'string: %s' % base) - built = signature_method.sign(request, consumer, token) - def _check_timestamp(self, timestamp): """Verify that timestamp is recentish.""" timestamp = int(timestamp) @@ -548,75 +774,8 @@ def _check_timestamp(self, timestamp): lapsed = now - timestamp if lapsed > self.timestamp_threshold: raise Error('Expired timestamp: given %d and now %s has a ' - 'greater difference than threshold %d' % (timestamp, now, self.timestamp_threshold)) - - -class Client(httplib2.Http): - """OAuthClient is a worker to attempt to execute a request.""" - - def __init__(self, consumer, token=None, cache=None, timeout=None, - proxy_info=None): - - if consumer is not None and not isinstance(consumer, Consumer): - raise ValueError("Invalid consumer.") - - if token is not None and not isinstance(token, Token): - raise ValueError("Invalid token.") - - self.consumer = consumer - self.token = token - self.method = SignatureMethod_HMAC_SHA1() - - httplib2.Http.__init__(self, cache=cache, timeout=timeout, - proxy_info=proxy_info) - - def set_signature_method(self, method): - if not isinstance(method, SignatureMethod): - raise ValueError("Invalid signature method.") - - self.method = method - - def request(self, uri, method="GET", body=None, headers=None, - redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None, - force_auth_header=False): - - if not isinstance(headers, dict): - headers = {} - - if body and method == "POST": - parameters = dict(parse_qsl(body)) - elif method == "GET": - parsed = urlparse.urlparse(uri) - parameters = parse_qs(parsed.query) - else: - parameters = None - - req = Request.from_consumer_and_token(self.consumer, token=self.token, - http_method=method, http_url=uri, parameters=parameters) - - req.sign_request(self.method, self.consumer, self.token) - - if force_auth_header: - # ensure we always send Authorization - headers.update(req.to_header()) - - if method == "POST": - if not force_auth_header: - body = req.to_postdata() - else: - body = req.encode_postdata(req.get_nonoauth_parameters()) - headers['Content-Type'] = 'application/x-www-form-urlencoded' - elif method == "GET": - if not force_auth_header: - uri = req.to_url() - else: - if not force_auth_header: - # don't call update twice. - headers.update(req.to_header()) - - return httplib2.Http.request(self, uri, method=method, body=body, - headers=headers, redirections=redirections, - connection_type=connection_type) + 'greater difference than threshold %d' % (timestamp, now, + self.timestamp_threshold)) class SignatureMethod(object): @@ -628,7 +787,7 @@ class SignatureMethod(object): provide a new way to sign requests. """ - def signing_base(self, request, consumer, token): + def signing_base(self, request, consumer, token): #pragma NO COVER """Calculates the string that needs to be signed. This method returns a 2-tuple containing the starting key for the @@ -638,7 +797,7 @@ def signing_base(self, request, consumer, token): """ raise NotImplementedError - def sign(self, request, consumer, token): + def sign(self, request, consumer, token): #pragma NO COVER """Returns the signature for the given request, based on the consumer and token also provided. 
@@ -657,11 +816,14 @@ def check(self, request, consumer, token, signature): class SignatureMethod_HMAC_SHA1(SignatureMethod): name = 'HMAC-SHA1' - + def signing_base(self, request, consumer, token): + if (not hasattr(request, 'normalized_url') or request.normalized_url is None): + raise ValueError("Base URL for request is not set.") + sig = ( escape(request.method), - escape(request.url), + escape(request.normalized_url), escape(request.get_normalized_parameters()), ) @@ -669,23 +831,18 @@ def signing_base(self, request, consumer, token): if token: key += escape(token.secret) raw = '&'.join(sig) - return key, raw + return key.encode('ascii'), raw.encode('ascii') def sign(self, request, consumer, token): """Builds the base signature string.""" key, raw = self.signing_base(request, consumer, token) - # HMAC object. - try: - import hashlib # 2.5 - hashed = hmac.new(key, raw, hashlib.sha1) - except ImportError: - import sha # Deprecated - hashed = hmac.new(key, raw, sha) + hashed = hmac.new(key, raw, sha1) # Calculate the digest base 64. return binascii.b2a_base64(hashed.digest())[:-1] + class SignatureMethod_PLAINTEXT(SignatureMethod): name = 'PLAINTEXT' @@ -700,5 +857,4 @@ def signing_base(self, request, consumer, token): def sign(self, request, consumer, token): key, raw = self.signing_base(request, consumer, token) - return raw - + return raw.encode('utf8') diff --git a/lib/oauth2/_compat.py b/lib/oauth2/_compat.py new file mode 100644 index 0000000000..b3f9ad0184 --- /dev/null +++ b/lib/oauth2/_compat.py @@ -0,0 +1,48 @@ +try: + TEXT = unicode +except NameError: #pragma NO COVER Py3k + PY3 = True + TEXT = str + STRING_TYPES = (str, bytes) + def b(x, encoding='ascii'): + return bytes(x, encoding) +else: #pragma NO COVER Python2 + PY3 = False + STRING_TYPES = (unicode, bytes) + def b(x, encoding='ascii'): + if isinstance(x, unicode): + x = x.encode(encoding) + return x + +def u(x, encoding='ascii'): + if isinstance(x, TEXT): #pragma NO COVER + return x + try: + return x.decode(encoding) + except AttributeError: #pragma NO COVER + raise ValueError('WTF: %s' % x) + +try: + import urlparse +except ImportError: #pragma NO COVER Py3k + from urllib.parse import parse_qs + from urllib.parse import parse_qsl + from urllib.parse import quote + from urllib.parse import unquote + from urllib.parse import unquote_to_bytes + from urllib.parse import urlencode + from urllib.parse import urlsplit + from urllib.parse import urlunsplit + from urllib.parse import urlparse + from urllib.parse import urlunparse +else: #pragma NO COVER Python2 + from urlparse import parse_qs + from urlparse import parse_qsl + from urllib import quote + from urllib import unquote + from urllib import urlencode + from urlparse import urlsplit + from urlparse import urlunsplit + from urlparse import urlparse + from urlparse import urlunparse + unquote_to_bytes = unquote diff --git a/lib/oauth2/_version.py b/lib/oauth2/_version.py new file mode 100644 index 0000000000..3b813cb2f6 --- /dev/null +++ b/lib/oauth2/_version.py @@ -0,0 +1,19 @@ +# This is the version of this source code. + +manual_verstr = "1.9" + + + +auto_build_num = "0.post1" + + + +verstr = manual_verstr + "." + auto_build_num +try: + from pyutil.version_class import Version as pyutil_Version +except (ImportError, ValueError): #pragma NO COVER + # Maybe there is no pyutil installed. 
+ from distutils.version import LooseVersion as distutils_Version + __version__ = distutils_Version(verstr) +else: #pragma NO COVER + __version__ = pyutil_Version(verstr) diff --git a/lib/oauth2/clients/__init__.py b/lib/oauth2/clients/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lib/oauth2/clients/imap.py b/lib/oauth2/clients/imap.py new file mode 100644 index 0000000000..68b7cd8c62 --- /dev/null +++ b/lib/oauth2/clients/imap.py @@ -0,0 +1,40 @@ +""" +The MIT License + +Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + +import oauth2 +import imaplib + + +class IMAP4_SSL(imaplib.IMAP4_SSL): + """IMAP wrapper for imaplib.IMAP4_SSL that implements XOAUTH.""" + + def authenticate(self, url, consumer, token): + if consumer is not None and not isinstance(consumer, oauth2.Consumer): + raise ValueError("Invalid consumer.") + + if token is not None and not isinstance(token, oauth2.Token): + raise ValueError("Invalid token.") + + imaplib.IMAP4_SSL.authenticate(self, 'XOAUTH', + lambda x: oauth2.build_xoauth_string(url, consumer, token)) diff --git a/lib/oauth2/clients/smtp.py b/lib/oauth2/clients/smtp.py new file mode 100644 index 0000000000..3e7bf0b0bc --- /dev/null +++ b/lib/oauth2/clients/smtp.py @@ -0,0 +1,41 @@ +""" +The MIT License + +Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+""" + +import oauth2 +import smtplib +import base64 + + +class SMTP(smtplib.SMTP): + """SMTP wrapper for smtplib.SMTP that implements XOAUTH.""" + + def authenticate(self, url, consumer, token): + if consumer is not None and not isinstance(consumer, oauth2.Consumer): + raise ValueError("Invalid consumer.") + + if token is not None and not isinstance(token, oauth2.Token): + raise ValueError("Invalid token.") + + self.docmd('AUTH', 'XOAUTH %s' % \ + base64.b64encode(oauth2.build_xoauth_string(url, consumer, token))) diff --git a/lib/profilehooks.py b/lib/profilehooks.py index fcc0cdc2f7..648766b5bc 100644 --- a/lib/profilehooks.py +++ b/lib/profilehooks.py @@ -16,7 +16,19 @@ def fn(n): if n < 2: return 1 else: return n * fn(n-1) - print fn(42) + print(fn(42)) + +Or without imports, with some hack + + $ python -m profilehooks yourmodule + + @profile # or @coverage + def fn(n): + if n < 2: return 1 + else: return n * fn(n-1) + + print(fn(42)) + Usage example (Python 2.3 or older):: @@ -62,7 +74,7 @@ def fn(n): executed. For this reason coverage analysis now uses trace.py which is slower, but more accurate. -Copyright (c) 2004--2008 Marius Gedminas +Copyright (c) 2004--2014 Marius Gedminas Copyright (c) 2007 Hanno Schlichting Copyright (c) 2008 Florian Schulze @@ -88,19 +100,19 @@ def fn(n): (Previously it was distributed under the GNU General Public Licence.) """ -# $Id: profilehooks.py 29 2010-08-13 16:29:20Z mg $ -__author__ = "Marius Gedminas (marius@gedmin.as)" -__copyright__ = "Copyright 2004-2009 Marius Gedminas" +__author__ = "Marius Gedminas " +__copyright__ = "Copyright 2004-2015 Marius Gedminas and contributors" __license__ = "MIT" -__version__ = "1.4" -__date__ = "2009-03-31" +__version__ = '1.8.1' +__date__ = "2015-11-21" import atexit import inspect import sys import re +import os # For profiling from profile import Profile @@ -134,17 +146,21 @@ def fn(n): # registry of available profilers AVAILABLE_PROFILERS = {} +__all__ = ['coverage', 'coverage_with_hotshot', 'profile', 'timecall'] + def profile(fn=None, skip=0, filename=None, immediate=False, dirs=False, sort=None, entries=40, - profiler=('cProfile', 'profile', 'hotshot')): + profiler=('cProfile', 'profile', 'hotshot'), + stdout=True): """Mark `fn` for profiling. If `skip` is > 0, first `skip` calls to `fn` will not be profiled. If `immediate` is False, profiling results will be printed to sys.stdout on program termination. Otherwise results will be printed - after each call. + after each call. (If you don't want this, set stdout=False and specify a + `filename` to store profile data.) If `dirs` is False only the name of the file will be printed. Otherwise the full path is used. @@ -170,7 +186,8 @@ def profile(fn=None, skip=0, filename=None, immediate=False, dirs=False, 'profile', 'hotshot'). If `filename` is specified, the profile stats will be stored in the - named file. You can load them pstats.Stats(filename). + named file. You can load them with pstats.Stats(filename) or use a + visualization tool like RunSnakeRun. Usage:: @@ -192,12 +209,12 @@ def fn(...): ... """ - if fn is None: # @profile() syntax -- we are a decorator maker + if fn is None: # @profile() syntax -- we are a decorator maker def decorator(fn): return profile(fn, skip=skip, filename=filename, immediate=immediate, dirs=dirs, sort=sort, entries=entries, - profiler=profiler) + profiler=profiler, stdout=stdout) return decorator # @profile syntax -- we are a decorator. 
if isinstance(profiler, str): @@ -208,14 +225,13 @@ def decorator(fn): break else: raise ValueError('only these profilers are available: %s' - % ', '.join(AVAILABLE_PROFILERS)) + % ', '.join(sorted(AVAILABLE_PROFILERS))) fp = profiler_class(fn, skip=skip, filename=filename, immediate=immediate, dirs=dirs, - sort=sort, entries=entries) - # fp = HotShotFuncProfile(fn, skip=skip, filename=filename, ...) - # or HotShotFuncProfile + sort=sort, entries=entries, stdout=stdout) # We cannot return fp or fp.__call__ directly as that would break method # definitions, instead we need to return a plain function. + def new_fn(*args, **kw): return fp(*args, **kw) new_fn.__doc__ = fn.__doc__ @@ -244,9 +260,10 @@ def fn(...): ... """ - fp = TraceFuncCoverage(fn) # or HotShotFuncCoverage + fp = TraceFuncCoverage(fn) # or HotShotFuncCoverage # We cannot return fp or fp.__call__ directly as that would break method # definitions, instead we need to return a plain function. + def new_fn(*args, **kw): return fp(*args, **kw) new_fn.__doc__ = fn.__doc__ @@ -268,6 +285,7 @@ def coverage_with_hotshot(fn): fp = HotShotFuncCoverage(fn) # We cannot return fp or fp.__call__ directly as that would break method # definitions, instead we need to return a plain function. + def new_fn(*args, **kw): return fp(*args, **kw) new_fn.__doc__ = fn.__doc__ @@ -286,7 +304,7 @@ class FuncProfile(object): Profile = Profile def __init__(self, fn, skip=0, filename=None, immediate=False, dirs=False, - sort=None, entries=40): + sort=None, entries=40, stdout=True): """Creates a profiler for a function. Every profiler has its own log file (the name of which is derived @@ -299,6 +317,7 @@ def __init__(self, fn, skip=0, filename=None, immediate=False, dirs=False, self.skip = skip self.filename = filename self.immediate = immediate + self.stdout = stdout self.dirs = dirs self.sort = sort or ('cumulative', 'time', 'calls') if isinstance(self.sort, str): @@ -332,25 +351,26 @@ def __call__(self, *args, **kw): def print_stats(self): """Print profile information to sys.stdout.""" - funcname = self.fn.__name__ - filename = self.fn.func_code.co_filename - lineno = self.fn.func_code.co_firstlineno - print - print "*** PROFILER RESULTS ***" - print "%s (%s:%s)" % (funcname, filename, lineno) - print "function called %d times" % self.ncalls, - if self.skipped: - print "(%d calls not profiled)" % self.skipped - else: - print - print stats = self.stats if self.filename: stats.dump_stats(self.filename) - if not self.dirs: - stats.strip_dirs() - stats.sort_stats(*self.sort) - stats.print_stats(self.entries) + if self.stdout: + funcname = self.fn.__name__ + filename = self.fn.__code__.co_filename + lineno = self.fn.__code__.co_firstlineno + print("") + print("*** PROFILER RESULTS ***") + print("%s (%s:%s)" % (funcname, filename, lineno)) + if self.skipped: + skipped = " (%d calls not profiled)" % self.skipped + else: + skipped = "" + print("function called %d times%s" % (self.ncalls, skipped)) + print("") + if not self.dirs: + stats.strip_dirs() + stats.sort_stats(*self.sort) + stats.print_stats(self.entries) def reset_stats(self): """Reset accumulated profiler statistics.""" @@ -364,6 +384,7 @@ def atexit(self): This function is registered as an atexit hook. """ + # XXX: uh, why even register this as an atexit hook if immediate is True? 
if not self.immediate: self.print_stats() @@ -383,13 +404,14 @@ class CProfileFuncProfile(FuncProfile): if hotshot is not None: - class HotShotFuncProfile(object): + class HotShotFuncProfile(FuncProfile): """Profiler for a function (uses hotshot).""" # This flag is shared between all instances in_profiler = False - def __init__(self, fn, skip=0, filename=None): + def __init__(self, fn, skip=0, filename=None, immediate=False, + dirs=False, sort=None, entries=40, stdout=True): """Creates a profiler for a function. Every profiler has its own log file (the name of which is derived @@ -401,17 +423,13 @@ def __init__(self, fn, skip=0, filename=None): The log file is not removed and remains there to clutter the current working directory. """ - self.fn = fn - self.filename = filename - if self.filename: + if filename: self.logfilename = filename + ".raw" else: - self.logfilename = fn.__name__ + ".prof" - self.profiler = hotshot.Profile(self.logfilename) - self.ncalls = 0 - self.skip = skip - self.skipped = 0 - atexit.register(self.atexit) + self.logfilename = "%s.%d.prof" % (fn.__name__, os.getpid()) + super(HotShotFuncProfile, self).__init__( + fn, skip=skip, filename=filename, immediate=immediate, + dirs=dirs, sort=sort, entries=entries, stdout=stdout) def __call__(self, *args, **kw): """Profile a singe call to the function.""" @@ -423,42 +441,31 @@ def __call__(self, *args, **kw): if HotShotFuncProfile.in_profiler: # handle recursive calls return self.fn(*args, **kw) + if self.profiler is None: + self.profiler = hotshot.Profile(self.logfilename) try: HotShotFuncProfile.in_profiler = True return self.profiler.runcall(self.fn, *args, **kw) finally: HotShotFuncProfile.in_profiler = False + if self.immediate: + self.print_stats() + self.reset_stats() - def atexit(self): - """Stop profiling and print profile information to sys.stderr. - - This function is registered as an atexit hook. - """ - self.profiler.close() - funcname = self.fn.__name__ - filename = self.fn.func_code.co_filename - lineno = self.fn.func_code.co_firstlineno - print - print "*** PROFILER RESULTS ***" - print "%s (%s:%s)" % (funcname, filename, lineno) - print "function called %d times" % self.ncalls, - if self.skipped: - print "(%d calls not profiled)" % self.skipped + def print_stats(self): + if self.profiler is None: + self.stats = pstats.Stats(Profile()) else: - print - print - stats = hotshot.stats.load(self.logfilename) - # hotshot.stats.load takes ages, and the .prof file eats megabytes, but - # a saved stats object is small and fast - if self.filename: - stats.dump_stats(self.filename) - # it is best to save before strip_dirs - stats.strip_dirs() - stats.sort_stats('cumulative', 'time', 'calls') - stats.print_stats(40) + self.profiler.close() + self.stats = hotshot.stats.load(self.logfilename) + super(HotShotFuncProfile, self).print_stats() - AVAILABLE_PROFILERS['hotshot'] = HotShotFuncProfile + def reset_stats(self): + self.profiler = None + self.ncalls = 0 + self.skipped = 0 + AVAILABLE_PROFILERS['hotshot'] = HotShotFuncProfile class HotShotFuncCoverage: """Coverage analysis for a function (uses _hotshot). @@ -482,7 +489,7 @@ def __init__(self, fn): current working directory. 
""" self.fn = fn - self.logfilename = fn.__name__ + ".cprof" + self.logfilename = "%s.%d.cprof" % (fn.__name__, os.getpid()) self.profiler = _hotshot.coverage(self.logfilename) self.ncalls = 0 atexit.register(self.atexit) @@ -490,7 +497,11 @@ def __init__(self, fn): def __call__(self, *args, **kw): """Profile a singe call to the function.""" self.ncalls += 1 - return self.profiler.runcall(self.fn, args, kw) + old_trace = sys.gettrace() + try: + return self.profiler.runcall(self.fn, args, kw) + finally: # pragma: nocover + sys.settrace(old_trace) def atexit(self): """Stop profiling and print profile information to sys.stderr. @@ -499,13 +510,13 @@ def atexit(self): """ self.profiler.close() funcname = self.fn.__name__ - filename = self.fn.func_code.co_filename - lineno = self.fn.func_code.co_firstlineno - print - print "*** COVERAGE RESULTS ***" - print "%s (%s:%s)" % (funcname, filename, lineno) - print "function called %d times" % self.ncalls - print + filename = self.fn.__code__.co_filename + lineno = self.fn.__code__.co_firstlineno + print("") + print("*** COVERAGE RESULTS ***") + print("%s (%s:%s)" % (funcname, filename, lineno)) + print("function called %d times" % self.ncalls) + print("") fs = FuncSource(self.fn) reader = hotshot.log.LogReader(self.logfilename) for what, (filename, lineno, funcname), tdelta in reader: @@ -522,7 +533,10 @@ def atexit(self): lineno = fs.firstcodelineno fs.mark(lineno) reader.close() - print fs + print(fs) + never_executed = fs.count_never_executed() + if never_executed: + print("%d lines were not executed." % never_executed) class TraceFuncCoverage: @@ -552,19 +566,21 @@ def __init__(self, fn): current working directory. """ self.fn = fn - self.logfilename = fn.__name__ + ".cprof" + self.logfilename = "%s.%d.cprof" % (fn.__name__, os.getpid()) self.ncalls = 0 atexit.register(self.atexit) def __call__(self, *args, **kw): """Profile a singe call to the function.""" self.ncalls += 1 - if TraceFuncCoverage.tracing: + if TraceFuncCoverage.tracing: # pragma: nocover return self.fn(*args, **kw) + old_trace = sys.gettrace() try: TraceFuncCoverage.tracing = True return self.tracer.runfunc(self.fn, *args, **kw) - finally: + finally: # pragma: nocover + sys.settrace(old_trace) TraceFuncCoverage.tracing = False def atexit(self): @@ -573,22 +589,22 @@ def atexit(self): This function is registered as an atexit hook. """ funcname = self.fn.__name__ - filename = self.fn.func_code.co_filename - lineno = self.fn.func_code.co_firstlineno - print - print "*** COVERAGE RESULTS ***" - print "%s (%s:%s)" % (funcname, filename, lineno) - print "function called %d times" % self.ncalls - print + filename = self.fn.__code__.co_filename + lineno = self.fn.__code__.co_firstlineno + print("") + print("*** COVERAGE RESULTS ***") + print("%s (%s:%s)" % (funcname, filename, lineno)) + print("function called %d times" % self.ncalls) + print("") fs = FuncSource(self.fn) for (filename, lineno), count in self.tracer.counts.items(): if filename != fs.filename: continue fs.mark(lineno, count) - print fs + print(fs) never_executed = fs.count_never_executed() if never_executed: - print "%d lines were not executed." % never_executed + print("%d lines were not executed." 
% never_executed) class FuncSource: @@ -599,20 +615,28 @@ class FuncSource: def __init__(self, fn): self.fn = fn self.filename = inspect.getsourcefile(fn) - self.source, self.firstlineno = inspect.getsourcelines(fn) self.sourcelines = {} - self.firstcodelineno = self.firstlineno - self.find_source_lines() + self.source = [] + self.firstlineno = self.firstcodelineno = 0 + try: + self.source, self.firstlineno = inspect.getsourcelines(fn) + self.firstcodelineno = self.firstlineno + self.find_source_lines() + except IOError: + self.filename = None def find_source_lines(self): """Mark all executable source lines in fn as executed 0 times.""" + if self.filename is None: + return strs = trace.find_strings(self.filename) - lines = trace.find_lines_from_code(self.fn.func_code, strs) - self.firstcodelineno = sys.maxint + lines = trace.find_lines_from_code(self.fn.__code__, strs) for lineno in lines: - self.firstcodelineno = min(self.firstcodelineno, lineno) self.sourcelines.setdefault(lineno, 0) - if self.firstcodelineno == sys.maxint: + if lines: + self.firstcodelineno = min(lines) + else: # pragma: nocover + # This branch cannot be reached, I'm just being paranoid. self.firstcodelineno = self.firstlineno def mark(self, lineno, count=1): @@ -635,6 +659,8 @@ def count_never_executed(self): def __str__(self): """Return annotated source code for the function.""" + if self.filename is None: + return "cannot show coverage data since co_filename is None" lines = [] lineno = self.firstlineno for line in self.source: @@ -642,7 +668,10 @@ def __str__(self): if counter is None: prefix = ' ' * 7 elif counter == 0: - if self.blank_rx.match(line): + if self.blank_rx.match(line): # pragma: nocover + # This is an workaround for an ancient bug I can't + # reproduce, perhaps because it was fixed, or perhaps + # because I can't remember all the details. prefix = ' ' * 7 else: prefix = '>' * 6 + ' ' @@ -653,7 +682,7 @@ def __str__(self): return ''.join(lines) -def timecall(fn=None, immediate=True, timer=time.time): +def timecall(fn=None, immediate=True, timer=None): """Wrap `fn` and print its execution time. Example:: @@ -675,14 +704,17 @@ def somefunc(x, y): @timecall(timer=time.clock) """ - if fn is None: # @timecall() syntax -- we are a decorator maker + if fn is None: # @timecall() syntax -- we are a decorator maker def decorator(fn): return timecall(fn, immediate=immediate, timer=timer) return decorator # @timecall syntax -- we are a decorator. + if timer is None: + timer = time.time fp = FuncTimer(fn, immediate=immediate, timer=timer) # We cannot return fp or fp.__call__ directly as that would break method # definitions, instead we need to return a plain function. 
+ def new_fn(*args, **kw): return fp(*args, **kw) new_fn.__doc__ = fn.__doc__ @@ -716,18 +748,45 @@ def __call__(self, *args, **kw): self.totaltime += duration if self.immediate: funcname = fn.__name__ - filename = fn.func_code.co_filename - lineno = fn.func_code.co_firstlineno - print >> sys.stderr, "\n %s (%s:%s):\n %.3f seconds\n" % ( - funcname, filename, lineno, duration) + filename = fn.__code__.co_filename + lineno = fn.__code__.co_firstlineno + sys.stderr.write("\n %s (%s:%s):\n %.3f seconds\n\n" % ( + funcname, filename, lineno, duration + )) + sys.stderr.flush() + def atexit(self): if not self.ncalls: return funcname = self.fn.__name__ - filename = self.fn.func_code.co_filename - lineno = self.fn.func_code.co_firstlineno - print ("\n %s (%s:%s):\n" - " %d calls, %.3f seconds (%.3f seconds per call)\n" % ( - funcname, filename, lineno, self.ncalls, - self.totaltime, self.totaltime / self.ncalls)) - + filename = self.fn.__code__.co_filename + lineno = self.fn.__code__.co_firstlineno + print("\n %s (%s:%s):\n" + " %d calls, %.3f seconds (%.3f seconds per call)\n" % ( + funcname, filename, lineno, self.ncalls, + self.totaltime, self.totaltime / self.ncalls) + ) + +if __name__ == '__main__': + + local = dict((name, globals()[name]) for name in __all__) + message = """******** +Injected `profilehooks` +-------- +{} +******** +""".format("\n".join(local.keys())) + + def interact_(): + from code import interact + interact(message, local=local) + + def run_(): + from runpy import run_module + print(message) + run_module(sys.argv[1], init_globals=local) + + if len(sys.argv) == 1: + interact_() + else: + run_() diff --git a/lib/pyasn1/LICENSE b/lib/pyasn1/LICENSE deleted file mode 100644 index fac589b8cd..0000000000 --- a/lib/pyasn1/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -Copyright (c) 2005-2013, Ilya Etingof -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. 
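
For completeness, a short usage sketch of the timecall decorator updated in the profilehooks hunks above (with timer left unset it now falls back to time.time when the decorator is applied):

    import time
    from profilehooks import timecall

    @timecall                      # immediate=True by default: report every call on stderr
    def step():
        time.sleep(0.01)

    @timecall(immediate=False)     # defer to a single per-function summary at exit
    def batch():
        return sum(i * i for i in range(10000))

    step()
    batch()
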
diff --git a/lib/pyasn1/__init__.py b/lib/pyasn1/__init__.py index 88aff79c84..5f0930008a 100644 --- a/lib/pyasn1/__init__.py +++ b/lib/pyasn1/__init__.py @@ -1,8 +1,8 @@ import sys # http://www.python.org/dev/peps/pep-0396/ -__version__ = '0.1.7' +__version__ = '0.1.9' if sys.version_info[:2] < (2, 4): - raise RuntimeError('PyASN1 requires Python 2.4 or later') + raise RuntimeError('PyASN1 requires Python 2.4 or later') diff --git a/lib/pyasn1/codec/ber/decoder.py b/lib/pyasn1/codec/ber/decoder.py index be0cf49074..61bfbce7e7 100644 --- a/lib/pyasn1/codec/ber/decoder.py +++ b/lib/pyasn1/codec/ber/decoder.py @@ -1,7 +1,7 @@ # BER decoder -from pyasn1.type import tag, base, univ, char, useful, tagmap +from pyasn1.type import tag, univ, char, useful, tagmap from pyasn1.codec.ber import eoo -from pyasn1.compat.octets import oct2int, octs2ints, isOctetsType +from pyasn1.compat.octets import oct2int, isOctetsType from pyasn1 import debug, error class AbstractDecoder: @@ -11,14 +11,14 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,)) def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, - length, state, decodeFun, substrateFun): + length, state, decodeFun, substrateFun): raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,)) class AbstractSimpleDecoder(AbstractDecoder): tagFormats = (tag.tagFormatSimple,) def _createComponent(self, asn1Spec, tagSet, value=None): if tagSet[0][1] not in self.tagFormats: - raise error.PyAsn1Error('Invalid tag format %r for %r' % (tagSet[0], self.protoComponent,)) + raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType())) if asn1Spec is None: return self.protoComponent.clone(value, tagSet) elif value is None: @@ -30,17 +30,12 @@ class AbstractConstructedDecoder(AbstractDecoder): tagFormats = (tag.tagFormatConstructed,) def _createComponent(self, asn1Spec, tagSet, value=None): if tagSet[0][1] not in self.tagFormats: - raise error.PyAsn1Error('Invalid tag format %r for %r' % (tagSet[0], self.protoComponent,)) + raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType())) if asn1Spec is None: return self.protoComponent.clone(tagSet) else: return asn1Spec.clone() -class EndOfOctetsDecoder(AbstractSimpleDecoder): - def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, - length, state, decodeFun, substrateFun): - return eoo.endOfOctets, substrate[length:] - class ExplicitTagDecoder(AbstractSimpleDecoder): protoComponent = univ.Any('') tagFormats = (tag.tagFormatConstructed,) @@ -63,7 +58,7 @@ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, substrate, length ) value, substrate = decodeFun(substrate, asn1Spec, tagSet, length) - terminator, substrate = decodeFun(substrate) + terminator, substrate = decodeFun(substrate, allowEoo=True) if eoo.endOfOctets.isSameTypeWith(terminator) and \ terminator == eoo.endOfOctets: return value, substrate @@ -129,14 +124,14 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, 'Trailing bits overflow %s' % trailingBits ) head = head[1:] - lsb = p = 0; l = len(head)-1; b = () + lsb = p = 0; l = len(head)-1; b = [] while p <= l: if p == l: lsb = trailingBits j = 7 o = oct2int(head[p]) while j >= lsb: - b = b + ((o>>j)&0x01,) + b.append((o>>j)&0x01) j = j - 1 p = p + 1 return self._createComponent(asn1Spec, tagSet, b), tail @@ -144,7 
+139,7 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, if substrateFun: return substrateFun(r, substrate, length) while head: - component, head = decodeFun(head) + component, head = decodeFun(head, self.protoComponent) r = r + component return r, tail @@ -154,7 +149,8 @@ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, if substrateFun: return substrateFun(r, substrate, length) while substrate: - component, substrate = decodeFun(substrate) + component, substrate = decodeFun(substrate, self.protoComponent, + allowEoo=True) if eoo.endOfOctets.isSameTypeWith(component) and \ component == eoo.endOfOctets: break @@ -177,7 +173,7 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, if substrateFun: return substrateFun(r, substrate, length) while head: - component, head = decodeFun(head) + component, head = decodeFun(head, self.protoComponent) r = r + component return r, tail @@ -187,7 +183,8 @@ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, if substrateFun: return substrateFun(r, substrate, length) while substrate: - component, substrate = decodeFun(substrate) + component, substrate = decodeFun(substrate, self.protoComponent, + allowEoo=True) if eoo.endOfOctets.isSameTypeWith(component) and \ component == eoo.endOfOctets: break @@ -216,20 +213,14 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, if not head: raise error.PyAsn1Error('Empty substrate') - # Get the first subid - subId = oct2int(head[0]) - oid = divmod(subId, 40) - - index = 1 + oid = () + index = 0 substrateLen = len(head) while index < substrateLen: subId = oct2int(head[index]) - index = index + 1 - if subId == 128: - # ASN.1 spec forbids leading zeros (0x80) in sub-ID OID - # encoding, tolerating it opens a vulnerability. - # See http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf page 7 - raise error.PyAsn1Error('Invalid leading 0x80 in sub-OID') + index += 1 + if subId < 128: + oid = oid + (subId,) elif subId > 128: # Construct subid from a number of octets nextSubId = subId @@ -239,11 +230,27 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, if index >= substrateLen: raise error.SubstrateUnderrunError( 'Short substrate for sub-OID past %s' % (oid,) - ) + ) nextSubId = oct2int(head[index]) - index = index + 1 - subId = (subId << 7) + nextSubId - oid = oid + (subId,) + index += 1 + oid = oid + ((subId << 7) + nextSubId,) + elif subId == 128: + # ASN.1 spec forbids leading zeros (0x80) in OID + # encoding, tolerating it opens a vulnerability. 
See + # http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf + # page 7 + raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding') + + # Decode two leading arcs + if 0 <= oid[0] <= 39: + oid = (0,) + oid + elif 40 <= oid[0] <= 79: + oid = (1, oid[0]-40) + oid[1:] + elif oid[0] >= 80: + oid = (2, oid[0]-80) + oid[1:] + else: + raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0]) + return self._createComponent(asn1Spec, tagSet, oid), tail class RealDecoder(AbstractSimpleDecoder): @@ -254,10 +261,13 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, if not head: return self._createComponent(asn1Spec, tagSet, 0.0), tail fo = oct2int(head[0]); head = head[1:] - if fo & 0x80: # binary enoding + if fo & 0x80: # binary encoding + if not head: + raise error.PyAsn1Error("Incomplete floating-point value") n = (fo & 0x03) + 1 if n == 4: n = oct2int(head[0]) + head = head[1:] eo, head = head[:n], head[n:] if not eo or not head: raise error.PyAsn1Error('Real exponent screwed') @@ -266,6 +276,13 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, e <<= 8 e |= oct2int(eo[0]) eo = eo[1:] + b = fo >> 4 & 0x03 # base bits + if b > 2: + raise error.PyAsn1Error('Illegal Real base') + if b == 1: # encbase = 8 + e *= 3 + elif b == 2: # encbase = 16 + e *= 4 p = 0 while head: # value p <<= 8 @@ -273,10 +290,14 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, head = head[1:] if fo & 0x40: # sign bit p = -p + sf = fo >> 2 & 0x03 # scale bits + p *= 2**sf value = (p, 2, e) elif fo & 0x40: # infinite value value = fo & 0x01 and '-inf' or 'inf' elif fo & 0xc0 == 0: # character encoding + if not head: + raise error.PyAsn1Error("Incomplete floating-point value") try: if fo & 0x3 == 0x1: # NR1 value = (int(head), 10, 0) @@ -336,7 +357,7 @@ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, idx = 0 while substrate: asn1Spec = self._getComponentTagMap(r, idx) - component, substrate = decodeFun(substrate, asn1Spec) + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) if eoo.endOfOctets.isSameTypeWith(component) and \ component == eoo.endOfOctets: break @@ -378,7 +399,7 @@ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, asn1Spec = r.getComponentType() idx = 0 while substrate: - component, substrate = decodeFun(substrate, asn1Spec) + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) if eoo.endOfOctets.isSameTypeWith(component) and \ component == eoo.endOfOctets: break @@ -437,7 +458,8 @@ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, return substrateFun(r, substrate, length) if r.getTagSet() == tagSet: # explicitly tagged Choice component, substrate = decodeFun(substrate, r.getComponentTagMap()) - eooMarker, substrate = decodeFun(substrate) # eat up EOO marker + # eat up EOO marker + eooMarker, substrate = decodeFun(substrate, allowEoo=True) if not eoo.endOfOctets.isSameTypeWith(eooMarker) or \ eooMarker != eoo.endOfOctets: raise error.PyAsn1Error('No EOO seen before substrate ends') @@ -485,7 +507,7 @@ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, if substrateFun: return substrateFun(r, substrate, length) while substrate: - component, substrate = decodeFun(substrate, asn1Spec) + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) if eoo.endOfOctets.isSameTypeWith(component) and \ component == eoo.endOfOctets: break @@ -521,13 +543,14 @@ class BMPStringDecoder(OctetStringDecoder): 
protoComponent = char.BMPString() # "useful" types +class ObjectDescriptorDecoder(OctetStringDecoder): + protoComponent = useful.ObjectDescriptor() class GeneralizedTimeDecoder(OctetStringDecoder): protoComponent = useful.GeneralizedTime() class UTCTimeDecoder(OctetStringDecoder): protoComponent = useful.UTCTime() tagMap = { - eoo.endOfOctets.tagSet: EndOfOctetsDecoder(), univ.Integer.tagSet: IntegerDecoder(), univ.Boolean.tagSet: BooleanDecoder(), univ.BitString.tagSet: BitStringDecoder(), @@ -552,9 +575,10 @@ class UTCTimeDecoder(OctetStringDecoder): char.UniversalString.tagSet: UniversalStringDecoder(), char.BMPString.tagSet: BMPStringDecoder(), # useful types + useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(), useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(), useful.UTCTime.tagSet: UTCTimeDecoder() - } +} # Type-to-codec map for ambiguous ASN.1 types typeMap = { @@ -564,7 +588,7 @@ class UTCTimeDecoder(OctetStringDecoder): univ.SequenceOf.typeId: SequenceOfDecoder(), univ.Choice.typeId: ChoiceDecoder(), univ.Any.typeId: AnyDecoder() - } +} ( stDecodeTag, stDecodeLength, stGetValueDecoder, stGetValueDecoderByAsn1Spec, stGetValueDecoderByTag, stTryAsExplicitTag, stDecodeValue, @@ -574,23 +598,22 @@ class Decoder: defaultErrorState = stErrorCondition # defaultErrorState = stDumpRawValue defaultRawDecoder = AnyDecoder() + supportIndefLength = True def __init__(self, tagMap, typeMap={}): self.__tagMap = tagMap self.__typeMap = typeMap - self.__endOfOctetsTagSet = eoo.endOfOctets.getTagSet() # Tag & TagSet objects caches self.__tagCache = {} self.__tagSetCache = {} def __call__(self, substrate, asn1Spec=None, tagSet=None, length=None, state=stDecodeTag, recursiveFlag=1, - substrateFun=None): + substrateFun=None, allowEoo=False): if debug.logger & debug.flagDecoder: debug.logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate))) fullSubstrate = substrate while state != stStop: if state == stDecodeTag: - # Decode tag if not substrate: raise error.SubstrateUnderrunError( 'Short octet stream on tag decoding' @@ -598,13 +621,25 @@ def __call__(self, substrate, asn1Spec=None, tagSet=None, if not isOctetsType(substrate) and \ not isinstance(substrate, univ.OctetString): raise error.PyAsn1Error('Bad octet stream type') - + # Decode tag firstOctet = substrate[0] substrate = substrate[1:] if firstOctet in self.__tagCache: lastTag = self.__tagCache[firstOctet] else: t = oct2int(firstOctet) + # Look for end-of-octets sentinel + if t == 0: + if substrate and oct2int(substrate[0]) == 0: + if allowEoo and self.supportIndefLength: + debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets sentinel found') + value, substrate = eoo.endOfOctets, substrate[1:] + state = stStop + continue + else: + raise error.PyAsn1Error('Unexpected end-of-contents sentinel') + else: + raise error.PyAsn1Error('Zero tag encountered') tagClass = t&0xC0 tagFormat = t&0x20 tagId = t&0x1F @@ -622,7 +657,7 @@ def __call__(self, substrate, asn1Spec=None, tagSet=None, break lastTag = tag.Tag( tagClass=tagClass, tagFormat=tagFormat, tagId=tagId - ) + ) if tagId < 31: # cache short tags self.__tagCache[firstOctet] = lastTag @@ -637,13 +672,13 @@ def __call__(self, substrate, asn1Spec=None, tagSet=None, else: tagSet = lastTag + tagSet state = stDecodeLength - debug.logger and debug.logger & debug.flagDecoder and debug.logger('tag decoded into %r, decoding length' % tagSet) + debug.logger and 
debug.logger & debug.flagDecoder and debug.logger('tag decoded into %s, decoding length' % tagSet) if state == stDecodeLength: # Decode length if not substrate: - raise error.SubstrateUnderrunError( - 'Short octet stream on length decoding' - ) + raise error.SubstrateUnderrunError( + 'Short octet stream on length decoding' + ) firstOctet = oct2int(substrate[0]) if firstOctet == 128: size = 1 @@ -670,6 +705,8 @@ def __call__(self, substrate, asn1Spec=None, tagSet=None, raise error.SubstrateUnderrunError( '%d-octet short' % (length - len(substrate)) ) + if length == -1 and not self.supportIndefLength: + error.PyAsn1Error('Indefinite length encoding not supported by this codec') state = stGetValueDecoder debug.logger and debug.logger & debug.flagDecoder and debug.logger('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length]))) if state == stGetValueDecoder: @@ -722,12 +759,12 @@ def __call__(self, substrate, asn1Spec=None, tagSet=None, if debug.logger and debug.logger & debug.flagDecoder: debug.logger('candidate ASN.1 spec is a map of:') for t, v in asn1Spec.getPosMap().items(): - debug.logger(' %r -> %s' % (t, v.__class__.__name__)) + debug.logger(' %s -> %s' % (t, v.__class__.__name__)) if asn1Spec.getNegMap(): debug.logger('but neither of: ') - for i in asn1Spec.getNegMap().items(): - debug.logger(' %r -> %s' % (t, v.__class__.__name__)) - debug.logger('new candidate ASN.1 spec is %s, chosen by %r' % (__chosenSpec is None and '' or __chosenSpec.__class__.__name__, tagSet)) + for t, v in asn1Spec.getNegMap().items(): + debug.logger(' %s -> %s' % (t, v.__class__.__name__)) + debug.logger('new candidate ASN.1 spec is %s, chosen by %s' % (__chosenSpec is None and '' or __chosenSpec.prettyPrintType(), tagSet)) else: __chosenSpec = asn1Spec debug.logger and debug.logger & debug.flagDecoder and debug.logger('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__) @@ -745,7 +782,7 @@ def __call__(self, substrate, asn1Spec=None, tagSet=None, elif baseTagSet in self.__tagMap: # base type or tagged subtype concreteDecoder = self.__tagMap[baseTagSet] - debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %r' % (baseTagSet,)) + debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %s' % (baseTagSet,)) else: concreteDecoder = None if concreteDecoder: @@ -753,10 +790,6 @@ def __call__(self, substrate, asn1Spec=None, tagSet=None, state = stDecodeValue else: state = stTryAsExplicitTag - elif tagSet == self.__endOfOctetsTagSet: - concreteDecoder = self.__tagMap[tagSet] - state = stDecodeValue - debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets found') else: concreteDecoder = None state = stTryAsExplicitTag @@ -795,8 +828,8 @@ def __call__(self, substrate, asn1Spec=None, tagSet=None, debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, value.prettyPrint(), substrate and debug.hexdump(substrate) or '')) if state == stErrorCondition: raise error.PyAsn1Error( - '%r not in asn1Spec: %r' % (tagSet, asn1Spec) - ) + '%s not in asn1Spec: %s' % (tagSet, asn1Spec) + ) if debug.logger and debug.logger & debug.flagDecoder: debug.scope.pop() debug.logger('decoder left scope %s, call completed' % debug.scope) diff --git a/lib/pyasn1/codec/ber/encoder.py 
b/lib/pyasn1/codec/ber/encoder.py index 173949d0b6..0fb4ae71e8 100644 --- a/lib/pyasn1/codec/ber/encoder.py +++ b/lib/pyasn1/codec/ber/encoder.py @@ -114,13 +114,17 @@ def encodeValue(self, encodeFun, value, defMode, maxChunkSize): class BitStringEncoder(AbstractItemEncoder): def encodeValue(self, encodeFun, value, defMode, maxChunkSize): if not maxChunkSize or len(value) <= maxChunkSize*8: - r = {}; l = len(value); p = 0; j = 7 - while p < l: - i, j = divmod(p, 8) - r[i] = r.get(i,0) | value[p]<<(7-j) - p = p + 1 - keys = list(r); keys.sort() - return int2oct(7-j) + ints2octs([r[k] for k in keys]), 0 + out_len = (len(value) + 7) // 8 + out_list = out_len * [0] + j = 7 + i = -1 + for val in value: + j += 1 + if j == 8: + i += 1 + j = 0 + out_list[i] = out_list[i] | val << (7-j) + return int2oct(7-j) + ints2octs(out_list), 0 else: pos = 0; substrate = null while 1: @@ -156,47 +160,98 @@ class ObjectIdentifierEncoder(AbstractItemEncoder): precomputedValues = { (1, 3, 6, 1, 2): (43, 6, 1, 2), (1, 3, 6, 1, 4): (43, 6, 1, 4) - } + } def encodeValue(self, encodeFun, value, defMode, maxChunkSize): oid = value.asTuple() if oid[:5] in self.precomputedValues: octets = self.precomputedValues[oid[:5]] - index = 5 + oid = oid[5:] else: if len(oid) < 2: raise error.PyAsn1Error('Short OID %s' % (value,)) + octets = () + # Build the first twos - if oid[0] > 6 or oid[1] > 39 or oid[0] == 6 and oid[1] > 15: + if oid[0] == 0 and 0 <= oid[1] <= 39: + oid = (oid[1],) + oid[2:] + elif oid[0] == 1 and 0 <= oid[1] <= 39: + oid = (oid[1] + 40,) + oid[2:] + elif oid[0] == 2: + oid = (oid[1] + 80,) + oid[2:] + else: raise error.PyAsn1Error( - 'Initial sub-ID overflow %s in OID %s' % (oid[:2], value) + 'Impossible initial arcs %s at %s' % (oid[:2], value) ) - octets = (oid[0] * 40 + oid[1],) - index = 2 - # Cycle through subids - for subid in oid[index:]: - if subid > -1 and subid < 128: + # Cycle through subIds + for subId in oid: + if subId > -1 and subId < 128: # Optimize for the common case - octets = octets + (subid & 0x7f,) - elif subid < 0 or subid > 0xFFFFFFFF: + octets = octets + (subId & 0x7f,) + elif subId < 0: raise error.PyAsn1Error( - 'SubId overflow %s in %s' % (subid, value) - ) + 'Negative OID arc %s at %s' % (subId, value) + ) else: # Pack large Sub-Object IDs - res = (subid & 0x7f,) - subid = subid >> 7 - while subid > 0: - res = (0x80 | (subid & 0x7f),) + res - subid = subid >> 7 + res = (subId & 0x7f,) + subId = subId >> 7 + while subId > 0: + res = (0x80 | (subId & 0x7f),) + res + subId = subId >> 7 # Add packed Sub-Object ID to resulted Object ID octets += res - + return ints2octs(octets), 0 class RealEncoder(AbstractItemEncoder): supportIndefLenMode = 0 + binEncBase = 2 # set to None to choose encoding base automatically + def _dropFloatingPoint(self, m, encbase, e): + ms, es = 1, 1 + if m < 0: + ms = -1 # mantissa sign + if e < 0: + es = -1 # exponenta sign + m *= ms + if encbase == 8: + m = m*2**(abs(e) % 3 * es) + e = abs(e) // 3 * es + elif encbase == 16: + m = m*2**(abs(e) % 4 * es) + e = abs(e) // 4 * es + + while 1: + if int(m) != m: + m *= encbase + e -= 1 + continue + break + return ms, int(m), encbase, e + + def _chooseEncBase(self, value): + m, b, e = value + base = [2, 8, 16] + if value.binEncBase in base: + return self._dropFloatingPoint(m, value.binEncBase, e) + elif self.binEncBase in base: + return self._dropFloatingPoint(m, self.binEncBase, e) + # auto choosing base 2/8/16 + mantissa = [m, m, m] + exponenta = [e, e, e] + encbase = 2 + e = float('inf') + for i in range(3): + 
sign, mantissa[i], base[i], exponenta[i] = \ + self._dropFloatingPoint(mantissa[i], base[i], exponenta[i]) + if abs(exponenta[i]) < abs(e) or \ + (abs(exponenta[i]) == abs(e) and mantissa[i] < m): + e = exponenta[i] + m = int(mantissa[i]) + encbase = base[i] + return sign, m, encbase, e + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): if value.isPlusInfinity(): return int2oct(0x40), 0 @@ -208,22 +263,43 @@ def encodeValue(self, encodeFun, value, defMode, maxChunkSize): if b == 10: return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), 0 elif b == 2: - fo = 0x80 # binary enoding - if m < 0: - fo = fo | 0x40 # sign bit - m = -m - while int(m) != m: # drop floating point - m *= 2 - e -= 1 - while m & 0x1 == 0: # mantissa normalization + fo = 0x80 # binary encoding + ms, m, encbase, e = self._chooseEncBase(value) + if ms < 0: # mantissa sign + fo = fo | 0x40 # sign bit + # exponenta & mantissa normalization + if encbase == 2: + while m & 0x1 == 0: + m >>= 1 + e += 1 + elif encbase == 8: + while m & 0x7 == 0: + m >>= 3 + e += 1 + fo |= 0x10 + else: # encbase = 16 + while m & 0xf == 0: + m >>= 4 + e += 1 + fo |= 0x20 + sf = 0 # scale factor + while m & 0x1 == 0: m >>= 1 - e += 1 + sf += 1 + if sf > 3: + raise error.PyAsn1Error('Scale factor overflow') # bug if raised + fo |= sf << 2 eo = null - while e not in (0, -1): - eo = int2oct(e&0xff) + eo - e >>= 8 - if e == 0 and eo and oct2int(eo[0]) & 0x80: - eo = int2oct(0) + eo + if e == 0 or e == -1: + eo = int2oct(e&0xff) + else: + while e not in (0, -1): + eo = int2oct(e&0xff) + eo + e >>= 8 + if e == 0 and eo and oct2int(eo[0]) & 0x80: + eo = int2oct(0) + eo + if e == -1 and eo and not (oct2int(eo[0]) & 0x80): + eo = int2oct(0xff) + eo n = len(eo) if n > 0xff: raise error.PyAsn1Error('Real exponent overflow') @@ -235,7 +311,7 @@ def encodeValue(self, encodeFun, value, defMode, maxChunkSize): fo |= 2 else: fo |= 3 - eo = int2oct(n//0xff+1) + eo + eo = int2oct(n&0xff) + eo po = null while m: po = int2oct(m&0xff) + po @@ -308,6 +384,7 @@ def encodeValue(self, encodeFun, value, defMode, maxChunkSize): char.UniversalString.tagSet: OctetStringEncoder(), char.BMPString.tagSet: OctetStringEncoder(), # useful types + useful.ObjectDescriptor.tagSet: OctetStringEncoder(), useful.GeneralizedTime.tagSet: OctetStringEncoder(), useful.UTCTime.tagSet: OctetStringEncoder() } @@ -323,12 +400,15 @@ def encodeValue(self, encodeFun, value, defMode, maxChunkSize): } class Encoder: + supportIndefLength = True def __init__(self, tagMap, typeMap={}): self.__tagMap = tagMap self.__typeMap = typeMap - def __call__(self, value, defMode=1, maxChunkSize=0): - debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.__class__.__name__, value.prettyPrint())) + def __call__(self, value, defMode=True, maxChunkSize=0): + if not defMode and not self.supportIndefLength: + raise error.PyAsn1Error('Indefinite length encoding not supported by this codec') + debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.prettyPrintType(), value.prettyPrint())) tagSet = value.getTagSet() if len(tagSet) > 1: concreteEncoder = explicitlyTaggedItemEncoder @@ -343,7 +423,7 @@ def __call__(self, value, defMode=1, maxChunkSize=0): concreteEncoder = self.__tagMap[tagSet] else: raise Error('No encoder for %s' % (value,)) - debug.logger & 
debug.flagEncoder and debug.logger('using value codec %s chosen by %r' % (concreteEncoder.__class__.__name__, tagSet)) + debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet)) substrate = concreteEncoder.encode( self, value, defMode, maxChunkSize ) diff --git a/lib/pyasn1/codec/cer/decoder.py b/lib/pyasn1/codec/cer/decoder.py index 9fd37c1347..1770cd8793 100644 --- a/lib/pyasn1/codec/cer/decoder.py +++ b/lib/pyasn1/codec/cer/decoder.py @@ -9,8 +9,8 @@ class BooleanDecoder(decoder.AbstractSimpleDecoder): def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): head, tail = substrate[:length], substrate[length:] - if not head: - raise error.PyAsn1Error('Empty substrate') + if not head or length != 1: + raise error.PyAsn1Error('Not single-octet Boolean payload') byte = oct2int(head[0]) # CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while # BER allows any non-zero value as TRUE; cf. sections 8.2.2. and 11.1 @@ -20,7 +20,7 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, elif byte == 0x00: value = 0 else: - raise error.PyAsn1Error('Boolean CER violation: %s' % byte) + raise error.PyAsn1Error('Unexpected Boolean payload: %s' % byte) return self._createComponent(asn1Spec, tagSet, value), tail tagMap = decoder.tagMap.copy() diff --git a/lib/pyasn1/codec/cer/encoder.py b/lib/pyasn1/codec/cer/encoder.py index 4c05130af9..61ce8a1507 100644 --- a/lib/pyasn1/codec/cer/encoder.py +++ b/lib/pyasn1/codec/cer/encoder.py @@ -1,7 +1,9 @@ # CER encoder from pyasn1.type import univ +from pyasn1.type import useful from pyasn1.codec.ber import encoder -from pyasn1.compat.octets import int2oct, null +from pyasn1.compat.octets import int2oct, str2octs, null +from pyasn1 import error class BooleanEncoder(encoder.IntegerEncoder): def encodeValue(self, encodeFun, client, defMode, maxChunkSize): @@ -15,18 +17,56 @@ class BitStringEncoder(encoder.BitStringEncoder): def encodeValue(self, encodeFun, client, defMode, maxChunkSize): return encoder.BitStringEncoder.encodeValue( self, encodeFun, client, defMode, 1000 - ) + ) class OctetStringEncoder(encoder.OctetStringEncoder): def encodeValue(self, encodeFun, client, defMode, maxChunkSize): return encoder.OctetStringEncoder.encodeValue( self, encodeFun, client, defMode, 1000 - ) + ) + +class RealEncoder(encoder.RealEncoder): + def _chooseEncBase(self, value): + m, b, e = value + return self._dropFloatingPoint(m, b, e) -# specialized RealEncoder here # specialized GeneralStringEncoder here -# specialized GeneralizedTimeEncoder here -# specialized UTCTimeEncoder here + +class GeneralizedTimeEncoder(OctetStringEncoder): + zchar = str2octs('Z') + pluschar = str2octs('+') + minuschar = str2octs('-') + zero = str2octs('0') + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + octets = client.asOctets() +# This breaks too many existing data items +# if '.' 
not in octets: +# raise error.PyAsn1Error('Format must include fraction of second: %r' % octets) + if len(octets) < 15: + raise error.PyAsn1Error('Bad UTC time length: %r' % octets) + if self.pluschar in octets or self.minuschar in octets: + raise error.PyAsn1Error('Must be UTC time: %r' % octets) + if octets[-1] != self.zchar[0]: + raise error.PyAsn1Error('Missing timezone specifier: %r' % octets) + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + +class UTCTimeEncoder(encoder.OctetStringEncoder): + zchar = str2octs('Z') + pluschar = str2octs('+') + minuschar = str2octs('-') + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + octets = client.asOctets() + if self.pluschar in octets or self.minuschar in octets: + raise error.PyAsn1Error('Must be UTC time: %r' % octets) + if octets and octets[-1] != self.zchar[0]: + client = client.clone(octets + self.zchar) + if len(client) != 13: + raise error.PyAsn1Error('Bad UTC time length: %r' % client) + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) class SetOfEncoder(encoder.SequenceOfEncoder): def encodeValue(self, encodeFun, client, defMode, maxChunkSize): @@ -69,17 +109,20 @@ def encodeValue(self, encodeFun, client, defMode, maxChunkSize): univ.Boolean.tagSet: BooleanEncoder(), univ.BitString.tagSet: BitStringEncoder(), univ.OctetString.tagSet: OctetStringEncoder(), + univ.Real.tagSet: RealEncoder(), + useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(), + useful.UTCTime.tagSet: UTCTimeEncoder(), univ.SetOf().tagSet: SetOfEncoder() # conflcts with Set - }) +}) typeMap = encoder.typeMap.copy() typeMap.update({ univ.Set.typeId: SetOfEncoder(), univ.SetOf.typeId: SetOfEncoder() - }) +}) class Encoder(encoder.Encoder): - def __call__(self, client, defMode=0, maxChunkSize=0): + def __call__(self, client, defMode=False, maxChunkSize=0): return encoder.Encoder.__call__(self, client, defMode, maxChunkSize) encode = Encoder(tagMap, typeMap) diff --git a/lib/pyasn1/codec/der/decoder.py b/lib/pyasn1/codec/der/decoder.py index 604abec2bc..ea58d6d758 100644 --- a/lib/pyasn1/codec/der/decoder.py +++ b/lib/pyasn1/codec/der/decoder.py @@ -1,9 +1,9 @@ # DER decoder -from pyasn1.type import univ from pyasn1.codec.cer import decoder tagMap = decoder.tagMap typeMap = decoder.typeMap -Decoder = decoder.Decoder +class Decoder(decoder.Decoder): + supportIndefLength = False decode = Decoder(tagMap, typeMap) diff --git a/lib/pyasn1/codec/der/encoder.py b/lib/pyasn1/codec/der/encoder.py index 4e5faefad4..7f55eeb9d3 100644 --- a/lib/pyasn1/codec/der/encoder.py +++ b/lib/pyasn1/codec/der/encoder.py @@ -1,6 +1,7 @@ # DER encoder from pyasn1.type import univ from pyasn1.codec.cer import encoder +from pyasn1 import error class SetOfEncoder(encoder.SetOfEncoder): def _cmpSetComponents(self, c1, c2): @@ -12,17 +13,20 @@ def _cmpSetComponents(self, c1, c2): tagMap = encoder.tagMap.copy() tagMap.update({ - # Overload CER encodrs with BER ones (a bit hackerish XXX) + # Overload CER encoders with BER ones (a bit hackerish XXX) univ.BitString.tagSet: encoder.encoder.BitStringEncoder(), univ.OctetString.tagSet: encoder.encoder.OctetStringEncoder(), # Set & SetOf have same tags univ.SetOf().tagSet: SetOfEncoder() - }) +}) typeMap = encoder.typeMap class Encoder(encoder.Encoder): - def __call__(self, client, defMode=1, maxChunkSize=0): + supportIndefLength = False + def __call__(self, client, defMode=True, maxChunkSize=0): + if not defMode: + raise error.PyAsn1Error('DER 
forbids indefinite length mode') return encoder.Encoder.__call__(self, client, defMode, maxChunkSize) - + encode = Encoder(tagMap, typeMap) diff --git a/lib/pyasn1/compat/binary.py b/lib/pyasn1/compat/binary.py new file mode 100644 index 0000000000..b38932afd5 --- /dev/null +++ b/lib/pyasn1/compat/binary.py @@ -0,0 +1,10 @@ +from sys import version_info + +if version_info[0:2] < (2, 6): + def bin(x): + if x <= 1: + return '0b'+str(x) + else: + return bin(x>>1) + str(x&1) +else: + bin = bin diff --git a/lib/pyasn1/compat/octets.py b/lib/pyasn1/compat/octets.py index f7f2a29bf5..e8127370c5 100644 --- a/lib/pyasn1/compat/octets.py +++ b/lib/pyasn1/compat/octets.py @@ -9,6 +9,7 @@ str2octs = lambda x: x octs2str = lambda x: x isOctetsType = lambda s: isinstance(s, str) + isStringType = lambda s: isinstance(s, (str, unicode)) else: ints2octs = bytes int2oct = lambda x: ints2octs((x,)) @@ -18,3 +19,4 @@ str2octs = lambda x: x.encode() octs2str = lambda x: x.decode() isOctetsType = lambda s: isinstance(s, bytes) + isStringType = lambda s: isinstance(s, str) diff --git a/lib/pyasn1/debug.py b/lib/pyasn1/debug.py index c27cb1d446..9b69886c04 100644 --- a/lib/pyasn1/debug.py +++ b/lib/pyasn1/debug.py @@ -1,4 +1,5 @@ -import sys +import time +import logging from pyasn1.compat.octets import octs2ints from pyasn1 import error from pyasn1 import __version__ @@ -14,23 +15,67 @@ 'all': flagAll } +class Printer: + def __init__(self, logger=None, handler=None, formatter=None): + if logger is None: + logger = logging.getLogger('pyasn1') + logger.setLevel(logging.DEBUG) + if handler is None: + handler = logging.StreamHandler() + if formatter is None: + formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s') + handler.setFormatter(formatter) + handler.setLevel(logging.DEBUG) + logger.addHandler(handler) + self.__logger = logger + + def __call__(self, msg): self.__logger.debug(msg) + def __str__(self): return '' + +if hasattr(logging, 'NullHandler'): + NullHandler = logging.NullHandler +else: + # Python 2.6 and older + class NullHandler(logging.Handler): + def emit(self, record): + pass + class Debug: - defaultPrinter = sys.stderr.write - def __init__(self, *flags): + defaultPrinter = None + def __init__(self, *flags, **options): self._flags = flagNone - self._printer = self.defaultPrinter + if options.get('printer') is not None: + self._printer = options.get('printer') + elif self.defaultPrinter is not None: + self._printer = self.defaultPrinter + if 'loggerName' in options: + # route our logs to parent logger + self._printer = Printer( + logger=logging.getLogger(options['loggerName']), + handler=NullHandler() + ) + else: + self._printer = Printer() self('running pyasn1 version %s' % __version__) for f in flags: - if f not in flagMap: - raise error.PyAsn1Error('bad debug flag %s' % (f,)) - self._flags = self._flags | flagMap[f] - self('debug category \'%s\' enabled' % f) - + inverse = f and f[0] in ('!', '~') + if inverse: + f = f[1:] + try: + if inverse: + self._flags &= ~flagMap[f] + else: + self._flags |= flagMap[f] + except KeyError: + raise error.PyAsn1Error('bad debug flag %s' % f) + + self('debug category \'%s\' %s' % (f, inverse and 'disabled' or 'enabled')) + def __str__(self): return 'logger %s, flags %x' % (self._printer, self._flags) def __call__(self, msg): - self._printer('DBG: %s\n' % msg) + self._printer(msg) def __and__(self, flag): return self._flags & flag diff --git a/lib/pyasn1/type/base.py b/lib/pyasn1/type/base.py index 40873719ca..72920a9d06 100644 --- 
a/lib/pyasn1/type/base.py +++ b/lib/pyasn1/type/base.py @@ -1,13 +1,13 @@ # Base classes for ASN.1 types import sys -from pyasn1.type import constraint, tagmap +from pyasn1.type import constraint, tagmap, tag from pyasn1 import error class Asn1Item: pass class Asn1ItemBase(Asn1Item): # Set of tags for this ASN.1 type - tagSet = () + tagSet = tag.TagSet() # A list of constraint.Constraint instances for checking values subtypeSpec = constraint.ConstraintsIntersection() @@ -38,22 +38,28 @@ def getTagSet(self): return self._tagSet def getEffectiveTagSet(self): return self._tagSet # used by untagged types def getTagMap(self): return tagmap.TagMap({self._tagSet: self}) - def isSameTypeWith(self, other): + def isSameTypeWith(self, other, matchTags=True, matchConstraints=True): return self is other or \ - self._tagSet == other.getTagSet() and \ - self._subtypeSpec == other.getSubtypeSpec() - def isSuperTypeOf(self, other): + (not matchTags or \ + self._tagSet == other.getTagSet()) and \ + (not matchConstraints or \ + self._subtypeSpec==other.getSubtypeSpec()) + + def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True): """Returns true if argument is a ASN1 subtype of ourselves""" - return self._tagSet.isSuperTagSetOf(other.getTagSet()) and \ - self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec()) + return (not matchTags or \ + self._tagSet.isSuperTagSetOf(other.getTagSet())) and \ + (not matchConstraints or \ + (self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec()))) -class __NoValue: +class NoValue: def __getattr__(self, attr): raise error.PyAsn1Error('No value for %s()' % attr) def __getitem__(self, i): raise error.PyAsn1Error('No value') + def __repr__(self): return '%s()' % self.__class__.__name__ -noValue = __NoValue() +noValue = NoValue() # Base class for "simple" ASN.1 objects. These are immutable. 
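The reworked isSameTypeWith()/isSuperTypeOf() signatures above let callers relax tag and constraint matching independently. A short sketch of the intent (illustrative values only):

    from pyasn1.type import univ, constraint

    plain = univ.Integer()
    narrow = univ.Integer().subtype(
        subtypeSpec=constraint.ValueRangeConstraint(0, 10))
    assert not plain.isSameTypeWith(narrow)                      # constraints differ
    assert plain.isSameTypeWith(narrow, matchConstraints=False)  # compare tags only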
class AbstractSimpleAsn1Item(Asn1ItemBase): @@ -72,10 +78,15 @@ def __init__(self, value=None, tagSet=None, subtypeSpec=None): self._len = None def __repr__(self): - if self._value is noValue: - return self.__class__.__name__ + '()' - else: - return self.__class__.__name__ + '(%s)' % (self.prettyOut(self._value),) + r = [] + if self._value is not self.defaultValue: + r.append(self.prettyOut(self._value)) + if self._tagSet is not self.tagSet: + r.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + r.append('subtypeSpec=%r' % (self._subtypeSpec,)) + return '%s(%s)' % (self.__class__.__name__, ', '.join(r)) + def __str__(self): return str(self._value) def __eq__(self, other): return self is other and True or self._value == other @@ -88,7 +99,11 @@ def __ge__(self, other): return self._value >= other def __nonzero__(self): return bool(self._value) else: def __bool__(self): return bool(self._value) - def __hash__(self): return self.__hashedValue + def __hash__(self): + return self.__hashedValue is noValue and hash(noValue) or self.__hashedValue + + def hasValue(self): + return not isinstance(self._value, NoValue) def clone(self, value=None, tagSet=None, subtypeSpec=None): if value is None and tagSet is None and subtypeSpec is None: @@ -121,14 +136,17 @@ def prettyIn(self, value): return value def prettyOut(self, value): return str(value) def prettyPrint(self, scope=0): - if self._value is noValue: - return '' - else: + if self.hasValue(): return self.prettyOut(self._value) + else: + return '' # XXX Compatibility stub def prettyPrinter(self, scope=0): return self.prettyPrint(scope) + def prettyPrintType(self, scope=0): + return '%s -> %s' % (self.getTagSet(), self.__class__.__name__) + # # Constructed types: # * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice @@ -166,13 +184,16 @@ def __init__(self, componentType=None, tagSet=None, self._componentValuesSet = 0 def __repr__(self): - r = self.__class__.__name__ + '()' - for idx in range(len(self._componentValues)): - if self._componentValues[idx] is None: - continue - r = r + '.setComponentByPosition(%s, %r)' % ( - idx, self._componentValues[idx] - ) + r = [] + if self._componentType is not self.componentType: + r.append('componentType=%r' % (self._componentType,)) + if self._tagSet is not self.tagSet: + r.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + r.append('subtypeSpec=%r' % (self._subtypeSpec,)) + r = '%s(%s)' % (self.__class__.__name__, ', '.join(r)) + if self._componentValues: + r += '.setComponents(%s)' % ', '.join([repr(x) for x in self._componentValues]) return r def __eq__(self, other): @@ -235,8 +256,17 @@ def getComponentByPosition(self, idx): def setComponentByPosition(self, idx, value, verifyConstraints=True): raise error.PyAsn1Error('Method not implemented') + def setComponents(self, *args, **kwargs): + for idx in range(len(args)): + self[idx] = args[idx] + for k in kwargs: + self[k] = kwargs[k] + return self + def getComponentType(self): return self._componentType + def setDefaultComponents(self): pass + def __getitem__(self, idx): return self.getComponentByPosition(idx) def __setitem__(self, idx, value): self.setComponentByPosition(idx, value) @@ -246,4 +276,3 @@ def clear(self): self._componentValues = [] self._componentValuesSet = 0 - def setDefaultComponents(self): pass diff --git a/lib/pyasn1/type/char.py b/lib/pyasn1/type/char.py index ae112f8bd3..af49ab3ef5 100644 --- a/lib/pyasn1/type/char.py +++ b/lib/pyasn1/type/char.py @@ 
-1,12 +1,6 @@ # ASN.1 "character string" types from pyasn1.type import univ, tag -class UTF8String(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( - tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12) - ) - encoding = "utf-8" - class NumericString(univ.OctetString): tagSet = univ.OctetString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18) @@ -21,7 +15,8 @@ class TeletexString(univ.OctetString): tagSet = univ.OctetString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20) ) - + +class T61String(TeletexString): pass class VideotexString(univ.OctetString): tagSet = univ.OctetString.tagSet.tagImplicitly( @@ -43,6 +38,8 @@ class VisibleString(univ.OctetString): tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26) ) +class ISO646String(VisibleString): pass + class GeneralString(univ.OctetString): tagSet = univ.OctetString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27) @@ -59,3 +56,9 @@ class BMPString(univ.OctetString): tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30) ) encoding = "utf-16-be" + +class UTF8String(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12) + ) + encoding = "utf-8" diff --git a/lib/pyasn1/type/namedtype.py b/lib/pyasn1/type/namedtype.py index 48967a5fe2..aca4282887 100644 --- a/lib/pyasn1/type/namedtype.py +++ b/lib/pyasn1/type/namedtype.py @@ -8,9 +8,17 @@ class NamedType: isDefaulted = 0 def __init__(self, name, t): self.__name = name; self.__type = t - def __repr__(self): return '%s(%s, %s)' % ( + def __repr__(self): return '%s(%r, %r)' % ( self.__class__.__name__, self.__name, self.__type ) + def __eq__(self, other): return tuple(self) == tuple(other) + def __ne__(self, other): return tuple(self) != tuple(other) + def __lt__(self, other): return tuple(self) < tuple(other) + def __le__(self, other): return tuple(self) <= tuple(other) + def __gt__(self, other): return tuple(self) > tuple(other) + def __ge__(self, other): return tuple(self) >= tuple(other) + def __hash__(self): return hash(tuple(self)) + def getType(self): return self.__type def getName(self): return self.__name def __getitem__(self, idx): @@ -33,11 +41,18 @@ def __init__(self, *namedTypes): self.__ambigiousTypes = {} def __repr__(self): - r = '%s(' % self.__class__.__name__ - for n in self.__namedTypes: - r = r + '%r, ' % (n,) - return r + ')' - + return '%s(%s)' % ( + self.__class__.__name__, + ', '.join([ repr(x) for x in self.__namedTypes ]) + ) + def __eq__(self, other): return tuple(self) == tuple(other) + def __ne__(self, other): return tuple(self) != tuple(other) + def __lt__(self, other): return tuple(self) < tuple(other) + def __le__(self, other): return tuple(self) <= tuple(other) + def __gt__(self, other): return tuple(self) > tuple(other) + def __ge__(self, other): return tuple(self) >= tuple(other) + def __hash__(self): return hash(tuple(self)) + def __getitem__(self, idx): return self.__namedTypes[idx] if sys.version_info[0] <= 2: @@ -45,7 +60,9 @@ def __nonzero__(self): return bool(self.__namedTypesLen) else: def __bool__(self): return bool(self.__namedTypesLen) def __len__(self): return self.__namedTypesLen - + + def clone(self): return self.__class__(*self.__namedTypes) + def getTypeByPosition(self, idx): if idx < 0 or idx >= self.__namedTypesLen: raise error.PyAsn1Error('Type position out of range') diff --git a/lib/pyasn1/type/namedval.py b/lib/pyasn1/type/namedval.py index 
d0fea7cc7c..676cb934b4 100644 --- a/lib/pyasn1/type/namedval.py +++ b/lib/pyasn1/type/namedval.py @@ -22,7 +22,19 @@ def __init__(self, *namedValues): self.valToNameIdx[val] = name self.namedValues = self.namedValues + ((name, val),) automaticVal = automaticVal + 1 + + def __repr__(self): + return '%s(%s)' % (self.__class__.__name__, ', '.join([repr(x) for x in self.namedValues])) + def __str__(self): return str(self.namedValues) + + def __eq__(self, other): return tuple(self) == tuple(other) + def __ne__(self, other): return tuple(self) != tuple(other) + def __lt__(self, other): return tuple(self) < tuple(other) + def __le__(self, other): return tuple(self) <= tuple(other) + def __gt__(self, other): return tuple(self) > tuple(other) + def __ge__(self, other): return tuple(self) >= tuple(other) + def __hash__(self): return hash(tuple(self)) def getName(self, value): if value in self.valToNameIdx: diff --git a/lib/pyasn1/type/tag.py b/lib/pyasn1/type/tag.py index 1144907fa1..7471a9b1fb 100644 --- a/lib/pyasn1/type/tag.py +++ b/lib/pyasn1/type/tag.py @@ -24,6 +24,9 @@ def __init__(self, tagClass, tagFormat, tagId): self.uniq = (tagClass, tagId) self.__hashedUniqTag = hash(self.uniq) + def __str__(self): + return '[%s:%s:%s]' % self.__tag + def __repr__(self): return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % ( (self.__class__.__name__,) + self.__tag @@ -62,11 +65,14 @@ def __init__(self, baseTag=(), *superTags): _uniq = _uniq + t.uniq self.uniq = _uniq self.__lenOfSuperTags = len(superTags) - + + def __str__(self): + return self.__superTags and '+'.join([str(x) for x in self.__superTags]) or '[untagged]' + def __repr__(self): return '%s(%s)' % ( self.__class__.__name__, - ', '.join([repr(x) for x in self.__superTags]) + '(), ' + ', '.join([repr(x) for x in self.__superTags]) ) def __add__(self, superTag): diff --git a/lib/pyasn1/type/tagmap.py b/lib/pyasn1/type/tagmap.py index 7cec3a10e4..feb91ae3d8 100644 --- a/lib/pyasn1/type/tagmap.py +++ b/lib/pyasn1/type/tagmap.py @@ -21,9 +21,23 @@ def __getitem__(self, tagSet): raise KeyError() def __repr__(self): - s = '%r/%r' % (self.__posMap, self.__negMap) + s = self.__class__.__name__ + '(' + if self.__posMap: + s = s + 'posMap=%r, ' % (self.__posMap,) + if self.__negMap: + s = s + 'negMap=%r, ' % (self.__negMap,) if self.__defType is not None: - s = s + '/%r' % (self.__defType,) + s = s + 'defType=%r' % (self.__defType,) + return s + ')' + + def __str__(self): + s = self.__class__.__name__ + ':\n' + if self.__posMap: + s = s + 'posMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__posMap.values()]) + if self.__negMap: + s = s + 'negMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__negMap.values()]) + if self.__defType is not None: + s = s + 'defType:\n%s, ' % self.__defType.prettyPrintType() return s def clone(self, parentType, tagMap, uniq=False): diff --git a/lib/pyasn1/type/univ.py b/lib/pyasn1/type/univ.py index 9cd16f8a2a..4ed640f2da 100644 --- a/lib/pyasn1/type/univ.py +++ b/lib/pyasn1/type/univ.py @@ -1,5 +1,5 @@ # ASN.1 "universal" data types -import operator, sys +import operator, sys, math from pyasn1.type import base, tag, constraint, namedtype, namedval, tagmap from pyasn1.codec.ber import eoo from pyasn1.compat import octets @@ -22,6 +22,12 @@ def __init__(self, value=None, tagSet=None, subtypeSpec=None, self, value, tagSet, subtypeSpec ) + def __repr__(self): + if self.__namedValues is not self.namedValues: + return '%s, %r)' % (base.AbstractSimpleAsn1Item.__repr__(self)[:-1], self.__namedValues) + else: 
+ return base.AbstractSimpleAsn1Item.__repr__(self) + def __and__(self, value): return self.clone(self._value & value) def __rand__(self, value): return self.clone(value & self._value) def __or__(self, value): return self.clone(self._value | value) @@ -57,8 +63,21 @@ def __int__(self): return int(self._value) if sys.version_info[0] <= 2: def __long__(self): return long(self._value) def __float__(self): return float(self._value) - def __abs__(self): return abs(self._value) + def __abs__(self): return self.clone(abs(self._value)) def __index__(self): return int(self._value) + def __pos__(self): return self.clone(+self._value) + def __neg__(self): return self.clone(-self._value) + def __invert__(self): return self.clone(~self._value) + def __round__(self, n=0): + r = round(self._value, n) + if n: + return self.clone(r) + else: + return r + def __floor__(self): return math.floor(self._value) + def __ceil__(self): return math.ceil(self._value) + if sys.version_info[0:2] > (2, 5): + def __trunc__(self): return self.clone(math.trunc(self._value)) def __lt__(self, value): return self._value < value def __le__(self, value): return self._value <= value @@ -73,7 +92,7 @@ def prettyIn(self, value): return int(value) except: raise error.PyAsn1Error( - 'Can\'t coerce %s into integer: %s' % (value, sys.exc_info()[1]) + 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1]) ) r = self.__namedValues.getValue(value) if r is not None: @@ -82,7 +101,7 @@ def prettyIn(self, value): return int(value) except: raise error.PyAsn1Error( - 'Can\'t coerce %s into integer: %s' % (value, sys.exc_info()[1]) + 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1]) ) def prettyOut(self, value): @@ -260,6 +279,15 @@ def prettyIn(self, value): def prettyOut(self, value): return '\"\'%s\'B\"' % ''.join([str(x) for x in value]) +try: + all +except NameError: # Python 2.4 + def all(iterable): + for element in iterable: + if not element: + return False + return True + class OctetString(base.AbstractSimpleAsn1Item): tagSet = baseTagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04) @@ -280,7 +308,7 @@ def __init__(self, value=None, tagSet=None, subtypeSpec=None, value = self.defaultHexValue if value is None or value is base.noValue: value = self.defaultBinValue - self.__intValue = None + self.__asNumbersCache = None base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec) def clone(self, value=None, tagSet=None, subtypeSpec=None, @@ -304,19 +332,33 @@ def clone(self, value=None, tagSet=None, subtypeSpec=None, def prettyIn(self, value): if isinstance(value, str): return value + elif isinstance(value, unicode): + try: + return value.encode(self._encoding) + except (LookupError, UnicodeEncodeError): + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) elif isinstance(value, (tuple, list)): try: return ''.join([ chr(x) for x in value ]) except ValueError: raise error.PyAsn1Error( 'Bad OctetString initializer \'%s\'' % (value,) - ) + ) else: return str(value) else: def prettyIn(self, value): if isinstance(value, bytes): return value + elif isinstance(value, str): + try: + return value.encode(self._encoding) + except UnicodeEncodeError: + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) elif isinstance(value, OctetString): return value.asOctets() elif isinstance(value, (tuple, list, map)): @@ -325,14 +367,14 @@ def prettyIn(self, value): except ValueError: raise 
error.PyAsn1Error( 'Bad OctetString initializer \'%s\'' % (value,) - ) + ) else: try: return str(value).encode(self._encoding) except UnicodeEncodeError: raise error.PyAsn1Error( 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) - ) + ) def fromBinaryString(self, value): @@ -369,21 +411,33 @@ def fromHexString(self, value): def prettyOut(self, value): if sys.version_info[0] <= 2: - numbers = tuple([ ord(x) for x in value ]) + numbers = tuple(( ord(x) for x in value )) else: numbers = tuple(value) - if [ x for x in numbers if x < 32 or x > 126 ]: - return '0x' + ''.join([ '%.2x' % x for x in numbers ]) - else: + if all(x >= 32 and x <= 126 for x in numbers): return str(value) + else: + return '0x' + ''.join(( '%.2x' % x for x in numbers )) def __repr__(self): - if self._value is base.noValue: - return self.__class__.__name__ + '()' - if [ x for x in self.asNumbers() if x < 32 or x > 126 ]: - return self.__class__.__name__ + '(hexValue=\'' + ''.join([ '%.2x' % x for x in self.asNumbers() ])+'\')' - else: - return self.__class__.__name__ + '(\'' + self.prettyOut(self._value) + '\')' + r = [] + doHex = False + if self._value is not self.defaultValue: + for x in self.asNumbers(): + if x < 32 or x > 126: + doHex = True + break + if not doHex: + r.append('%r' % (self._value,)) + if self._tagSet is not self.tagSet: + r.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + r.append('subtypeSpec=%r' % (self._subtypeSpec,)) + if self.encoding is not self._encoding: + r.append('encoding=%r' % (self._encoding,)) + if doHex: + r.append('hexValue=%r' % ''.join([ '%.2x' % x for x in self.asNumbers() ])) + return '%s(%s)' % (self.__class__.__name__, ', '.join(r)) if sys.version_info[0] <= 2: def __str__(self): return str(self._value) @@ -391,17 +445,17 @@ def __unicode__(self): return self._value.decode(self._encoding, 'ignore') def asOctets(self): return self._value def asNumbers(self): - if self.__intValue is None: - self.__intValue = tuple([ ord(x) for x in self._value ]) - return self.__intValue + if self.__asNumbersCache is None: + self.__asNumbersCache = tuple([ ord(x) for x in self._value ]) + return self.__asNumbersCache else: def __str__(self): return self._value.decode(self._encoding, 'ignore') def __bytes__(self): return self._value def asOctets(self): return self._value def asNumbers(self): - if self.__intValue is None: - self.__intValue = tuple(self._value) - return self.__intValue + if self.__asNumbersCache is None: + self.__asNumbersCache = tuple(self._value) + return self.__asNumbersCache # Immutable sequence object protocol @@ -419,7 +473,9 @@ def __add__(self, value): return self.clone(self._value + self.prettyIn(value)) def __radd__(self, value): return self.clone(self.prettyIn(value) + self._value) def __mul__(self, value): return self.clone(self._value * value) def __rmul__(self, value): return self * value - + def __int__(self): return int(self._value) + def __float__(self): return float(self._value) + class Null(OctetString): defaultValue = ''.encode() # This is tightly constrained tagSet = baseTagSet = tag.initTagSet( @@ -430,7 +486,9 @@ class Null(OctetString): if sys.version_info[0] <= 2: intTypes = (int, long) else: - intTypes = int + intTypes = (int,) + +numericTypes = intTypes + (float,) class ObjectIdentifier(base.AbstractSimpleAsn1Item): tagSet = baseTagSet = tag.initTagSet( @@ -456,7 +514,9 @@ def __getitem__(self, i): return self._value[i] def __str__(self): return self.prettyPrint() - + def __repr__(self): + return 
'%s(%r)' % (self.__class__.__name__, self.prettyPrint()) + def index(self, suboid): return self._value.index(suboid) def isPrefixOf(self, value): @@ -473,7 +533,7 @@ def prettyIn(self, value): pass elif isinstance(value, ObjectIdentifier): return tuple(value) - elif isinstance(value, str): + elif octets.isStringType(value): r = [] for element in [ x for x in value.split('.') if x != '' ]: try: @@ -504,6 +564,7 @@ def prettyIn(self, value): def prettyOut(self, value): return '.'.join([ str(x) for x in value ]) class Real(base.AbstractSimpleAsn1Item): + binEncBase = None # binEncBase = 16 is recommended for large numbers try: _plusInf = float('inf') _minusInf = float('-inf') @@ -526,11 +587,13 @@ def __normalizeBase10(self, value): def prettyIn(self, value): if isinstance(value, tuple) and len(value) == 3: - for d in value: - if not isinstance(d, intTypes): - raise error.PyAsn1Error( - 'Lame Real value syntax: %s' % (value,) - ) + if not isinstance(value[0], numericTypes) or \ + not isinstance(value[1], intTypes) or \ + not isinstance(value[2], intTypes): + raise error.PyAsn1Error('Lame Real value syntax: %s' % (value,)) + if isinstance(value[0], float) and \ + self._inf and value[0] in self._inf: + return value[0] if value[1] not in (2, 10): raise error.PyAsn1Error( 'Prohibited base for Real value: %s' % (value[1],) @@ -540,7 +603,14 @@ def prettyIn(self, value): return value elif isinstance(value, intTypes): return self.__normalizeBase10((value, 10, 0)) - elif isinstance(value, float): + elif isinstance(value, (str, float)): + if isinstance(value, str): + try: + value = float(value) + except ValueError: + raise error.PyAsn1Error( + 'Bad real value syntax: %s' % (value,) + ) if self._inf and value in self._inf: return value else: @@ -551,11 +621,6 @@ def prettyIn(self, value): return self.__normalizeBase10((int(value), 10, e)) elif isinstance(value, Real): return tuple(value) - elif isinstance(value, str): # handle infinite literal - try: - return float(value) - except ValueError: - pass raise error.PyAsn1Error( 'Bad real value syntax: %s' % (value,) ) @@ -566,6 +631,12 @@ def prettyOut(self, value): else: return str(value) + def prettyPrint(self, scope=0): + if self.isInfinity(): + return self.prettyOut(self._value) + else: + return str(float(self)) + def isPlusInfinity(self): return self._value == self._plusInf def isMinusInfinity(self): return self._value == self._minusInf def isInfinity(self): return self._value in self._inf @@ -601,8 +672,20 @@ def __float__(self): else: return float( self._value[0] * pow(self._value[1], self._value[2]) - ) - def __abs__(self): return abs(float(self)) + ) + def __abs__(self): return self.clone(abs(float(self))) + def __pos__(self): return self.clone(+float(self)) + def __neg__(self): return self.clone(-float(self)) + def __round__(self, n=0): + r = round(float(self), n) + if n: + return self.clone(r) + else: + return r + def __floor__(self): return self.clone(math.floor(float(self))) + def __ceil__(self): return self.clone(math.ceil(float(self))) + if sys.version_info[0:2] > (2, 5): + def __trunc__(self): return self.clone(math.trunc(float(self))) def __lt__(self, value): return float(self) < value def __le__(self, value): return float(self) <= value @@ -636,6 +719,7 @@ class SetOf(base.AbstractConstructedAsn1Item): tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11) ) typeId = 1 + strictConstraints = False def _cloneComponentValues(self, myClone, cloneValueFlag): idx = 0; l = len(self._componentValues) @@ -651,9 +735,14 @@ def 
_cloneComponentValues(self, myClone, cloneValueFlag): idx = idx + 1 def _verifyComponent(self, idx, value): - if self._componentType is not None and \ - not self._componentType.isSuperTypeOf(value): - raise error.PyAsn1Error('Component type error %s' % (value,)) + t = self._componentType + if t is None: + return + if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t)) + if self.strictConstraints and \ + not t.isSuperTypeOf(value, matchTags=False): + raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t)) def getComponentByPosition(self, idx): return self._componentValues[idx] def setComponentByPosition(self, idx, value=None, verifyConstraints=True): @@ -698,6 +787,14 @@ def prettyPrint(self, scope=0): r = r + self._componentValues[idx].prettyPrint(scope) return r + def prettyPrintType(self, scope=0): + scope = scope + 1 + r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__) + if self._componentType is not None: + r = r + ' '*scope + r = r + self._componentType.prettyPrintType(scope) + return r + '\n' + ' '*(scope-1) + '}' + class SequenceOf(SetOf): tagSet = baseTagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10) @@ -706,15 +803,15 @@ class SequenceOf(SetOf): class SequenceAndSetBase(base.AbstractConstructedAsn1Item): componentType = namedtype.NamedTypes() + strictConstraints = False def __init__(self, componentType=None, tagSet=None, subtypeSpec=None, sizeSpec=None): + if componentType is None: + componentType = self.componentType base.AbstractConstructedAsn1Item.__init__( - self, componentType, tagSet, subtypeSpec, sizeSpec - ) - if self._componentType is None: - self._componentTypeLen = 0 - else: - self._componentTypeLen = len(self._componentType) + self, componentType.clone(), tagSet, subtypeSpec, sizeSpec + ) + self._componentTypeLen = len(self._componentType) def __getitem__(self, idx): if isinstance(idx, str): @@ -747,8 +844,11 @@ def _verifyComponent(self, idx, value): 'Component type error out of range' ) t = self._componentType[idx].getType() - if not t.isSuperTypeOf(value): - raise error.PyAsn1Error('Component type error %r vs %r' % (t, value)) + if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t)) + if self.strictConstraints and \ + not t.isSuperTypeOf(value, matchTags=False): + raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t)) def getComponentByName(self, name): return self.getComponentByPosition( @@ -756,9 +856,8 @@ def getComponentByName(self, name): ) def setComponentByName(self, name, value=None, verifyConstraints=True): return self.setComponentByPosition( - self._componentType.getPositionByName(name), value, - verifyConstraints - ) + self._componentType.getPositionByName(name),value,verifyConstraints + ) def getComponentByPosition(self, idx): try: @@ -767,7 +866,11 @@ def getComponentByPosition(self, idx): if idx < self._componentTypeLen: return raise - def setComponentByPosition(self, idx, value=None, verifyConstraints=True): + def setComponentByPosition(self, idx, value=None, + verifyConstraints=True, + exactTypes=False, + matchTags=True, + matchConstraints=True): l = len(self._componentValues) if idx >= l: self._componentValues = self._componentValues + (idx-l+1)*[None] @@ -834,6 +937,17 @@ def prettyPrint(self, scope=0): ) return r + 
def prettyPrintType(self, scope=0): + scope = scope + 1 + r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__) + for idx in range(len(self.componentType)): + r = r + ' '*scope + r = r + '"%s"' % self.componentType.getNameByPosition(idx) + r = '%s = %s\n' % ( + r, self._componentType.getTypeByPosition(idx).prettyPrintType(scope) + ) + return r + '\n' + ' '*(scope-1) + '}' + class Sequence(SequenceAndSetBase): tagSet = baseTagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10) @@ -877,16 +991,16 @@ def setComponentByType(self, tagSet, value=None, innerFlag=0, if t.getTagSet(): return self.setComponentByPosition( idx, value, verifyConstraints - ) + ) else: t = self.setComponentByPosition(idx).getComponentByPosition(idx) return t.setComponentByType( tagSet, value, innerFlag, verifyConstraints - ) + ) else: # set outer component by inner tagSet return self.setComponentByPosition( idx, value, verifyConstraints - ) + ) def getComponentTagMap(self): if self._componentType: diff --git a/lib/pyasn1/type/useful.py b/lib/pyasn1/type/useful.py index a7139c22ce..1766534889 100644 --- a/lib/pyasn1/type/useful.py +++ b/lib/pyasn1/type/useful.py @@ -1,6 +1,11 @@ # ASN.1 "useful" types from pyasn1.type import char, tag +class ObjectDescriptor(char.GraphicString): + tagSet = char.GraphicString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7) + ) + class GeneralizedTime(char.VisibleString): tagSet = char.VisibleString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24) diff --git a/lib/pytz/__init__.py b/lib/pytz/__init__.py index f63bb9d53a..20805ea3fc 100644 --- a/lib/pytz/__init__.py +++ b/lib/pytz/__init__.py @@ -9,8 +9,8 @@ ''' # The IANA (nee Olson) database is updated several times a year. -OLSON_VERSION = '2016c' -VERSION = '2016.3' # Switching to pip compatible version numbering. +OLSON_VERSION = '2016d' +VERSION = '2016.4' # Switching to pip compatible version numbering. 
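The zoneinfo files that follow come from the 2016d tz release, which splits Asia/Tomsk and Europe/Kirov out of their former parent zones. For example (assuming the bundled data is used):

    from datetime import datetime
    import pytz

    tomsk = pytz.timezone('Asia/Tomsk')               # new in 2016d
    # Tomsk moved from UTC+6 to UTC+7 in late May 2016
    print(tomsk.localize(datetime(2016, 6, 1, 12)))   # 2016-06-01 12:00:00+07:00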
__version__ = VERSION OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling @@ -804,6 +804,7 @@ def _test(): 'Asia/Thimbu', 'Asia/Thimphu', 'Asia/Tokyo', + 'Asia/Tomsk', 'Asia/Ujung_Pandang', 'Asia/Ulaanbaatar', 'Asia/Ulan_Bator', @@ -930,6 +931,7 @@ def _test(): 'Europe/Jersey', 'Europe/Kaliningrad', 'Europe/Kiev', + 'Europe/Kirov', 'Europe/Lisbon', 'Europe/Ljubljana', 'Europe/London', @@ -1360,6 +1362,7 @@ def _test(): 'Asia/Tehran', 'Asia/Thimphu', 'Asia/Tokyo', + 'Asia/Tomsk', 'Asia/Ulaanbaatar', 'Asia/Urumqi', 'Asia/Ust-Nera', @@ -1418,6 +1421,7 @@ def _test(): 'Europe/Jersey', 'Europe/Kaliningrad', 'Europe/Kiev', + 'Europe/Kirov', 'Europe/Lisbon', 'Europe/Ljubljana', 'Europe/London', diff --git a/lib/pytz/zoneinfo/America/Caracas b/lib/pytz/zoneinfo/America/Caracas index 15b9a52c2c4c6f1cefb2b8416b6d9a99f374eaab..e1a09a64808d6bc39be5b4c2843c5a1e153f3c81 100644 GIT binary patch literal 261 zcmWHE%1kq2zyK^j5fBCeRv-qkc^ZJkWH}w1Z!_L_xJOSa2v=QMz{teR#QgvNK?eo~ zAnC%u^8f#V9Sj`*{~tfVz~SQ?!r%4G*g`g-O0b1LpnCoTL1w#@1BeDW a07Qcv0g?te1Vn=z!$7iYxPVSG;Q{~-zCmXI literal 266 zcmWHE%1kq2zyPd35fBCe79a+(c^ZJkWH}w1Z!_L_xJOSaU}R!u`v3o+0|Nt)bYWom z|Np=a29E##j~@V$AhwTh2!k^ahq;CT^};|1VLM^=^ZbBn`VRz|?NSaP8sr8L4RQxa T8sruRavj43beoBlu>ltV-_k|Y diff --git a/lib/pytz/zoneinfo/Asia/Almaty b/lib/pytz/zoneinfo/Asia/Almaty index 75a007deeb1c343c06ac4bc95ef953a1582e8aae..2ede12272b0e1129fccef2efa9cd518895a0d997 100644 GIT binary patch delta 395 zcmZ3%{*!%zxF{C`0|N+yfC&(T*a8!EVuioFbI^G5HbAptYJldI3l3UG4mwPnk}z41 zS-hT&orM)AWMJ@60U72Nz`(-5kW|3P!N3rbz`z3|VIqt`5p*>^z99_S2Btu44#Z|4 zV;C600L%mvAyE^VM6o-2@-)W1%0PF6)PdX%qCxHl(?CCfXplc9r!gIztj{bqupZ&E MVF&urPS=bJ04PZ}<^TWy delta 358 zcmey#zJh&%xF{O~0|N+yfH4q**!&Z9V!7*U7HD2sCZKhslVReVgnB2_iwX>k%uEo- z!iofeLdaZXHZuc*hYHAOw*Uqf28N^pMotEXkOUALLV`qmd_x!6Y@?m%Cxt4RnG`8voT1RpoOq{cBvJ#8Jxt4RnG`7GET1QM4Oq{chiG^{p0!z#03ye&RT!9-HfB+=3`3{p4BSfwj0L&c^ A>;M1& diff --git a/lib/pytz/zoneinfo/Asia/Aqtau b/lib/pytz/zoneinfo/Asia/Aqtau index 811ed2f9161cb466ddf657bb8334c6840d0b88bd..7bfacff50c2058eab96abff1df97918765df4636 100644 GIT binary patch delta 491 zcmeyy@tS>txFQz=0|N+yfH4q**a8heVwL2S2U{2ab(m=5tN!IrfX0*G37Qp)6Ev^f z3(z`pK0tfRfdHK)8v}HwEDF$TnH->BGAUu=8lQRwMkZz!CLm;GLxSua5Do`BCS+w` z&{qIj$Y9~Xz{0@b7Qn#4zz~wazyl%~fh0@>!e#`@B2@bLhA?Owm;kXU5SuZGfDB?_ z1OYG$OiU2h#p=x;P$mC?0OVZ|4f672X{Nc#KtF?|LH-8OAislYp#MQMC=4cFW>RCC dlrZ@>)6DvT^)`bH$W3V8=CT2Xik+@07Xa_FR3!ia literal 1142 zcmdtgPe_wt9KiACZCY+K*&*88GT;8#P*c}(W0uwo8bXua^Z`K%obEz~?YHdqvUSG1eqJxu7UwcM6Q45irg=ZU{NrijYpvS4l=hP!uUAuVD|*+f zYr5^pl-_-RLbu--(f*YIz2|I~-dj%TeS^(v;b99`;FgR?m`^d_c`-HEH?k87%UVOb4VTFi;ux1#X2gc=${ z_S(8_!xaiQg(J~Ow0ZmHD=b8xZ~q2SD2fIlO2OK0&!rM>_gdWf`m{%?3#a z$p=XY$p}da$q7lyBFG9!3&{&f49N^h&1rK(l5^VZko1uJkOYwokra^}ktC5Uku>o? M%`;+CmOZ)1Zw_Yfe*gdg diff --git a/lib/pytz/zoneinfo/Asia/Aqtobe b/lib/pytz/zoneinfo/Asia/Aqtobe index ff3b96b3e9d49adf945c2f9e40cf9cbafc94b19e..3bd7f09d859fd4c3837d8916078b1757300bcfee 100644 GIT binary patch delta 364 zcmbQk(aAAEn^9n*POR{kKLHv~ekW*FEKbn8axXyZ$oYVYGZH4}F^ku;bFi}EgbWO( zGZ=ut!hwN>fx#_+frEh|B!Q6!L^1$LWD&OjMxZQ2B}5HK*2g!5LEFFth)sdm3}hGs z10xuKnUm)*8Zrt@KF7FM8R$rm5Xhk*8su0o4RkPw20422GNyx*^O?m4+SU9Z2Qe^k M*#Ny|r)$au04pRpKL7v# delta 414 zcmeC=n8Ptan^9__PAq?Y*9J}RWd@p8wkT*FnJzGKPC~tt>3stRMkZz!W+otHWkZ5Y z9PAvNT-YH81B2-d1|YC-U|?Zja0>vDAqk9}AQDrAksqu^6s!iy_VEp2aFp{70g~P! 
z!9X$)N``>UWnf@rWMG)w&1k_WHTekRUTL7)L82h{gK3~2Ks3l7lb0|ZtOv3O(l2IU TZ{YI_$Uktv1@vu*t|=D)>1#X& diff --git a/lib/pytz/zoneinfo/Asia/Baku b/lib/pytz/zoneinfo/Asia/Baku index 69ce811f847383c740ee4af2725693bb11122b52..641148f2551e50be780346b7808128cf1ed58421 100644 GIT binary patch delta 193 zcmZ3=wUujvxF8<`0|N+yfGZI5P1ML^`c*J-#sOVUb`CCX9xh&@;N*EMR+BHWSg|oO gFfcMQO#aTOifQ&_U8Y+=!#8s>XE0J`ya1~x0NN!OuK)l5 delta 189 zcmdnWwUlduxF9bB0|N+yfGZI5PSnU_y5KNz#sOUpb`DN1ZcZMe;N)d2R+FExn6NM~ eFfva5&!~!N@?=w{TR@{X3o>UgQewCQt0(|5(--Xk diff --git a/lib/pytz/zoneinfo/Asia/Barnaul b/lib/pytz/zoneinfo/Asia/Barnaul index 989d9843360740010661173b0df834066be8686d..fe6fcf15806d46ea3011215046b78d84eae35796 100644 GIT binary patch delta 67 zcmcb~d6RR(G`2$qT1Pe;Oq{cegOvpgIVLAC%5JV^OlRaeWWWFfAo|5&j9kkE7=QpIwfQ$w5F<XWG`1}YT1S>DOq{c6auB1^-BDU;qM;^ya@z-i$~J FY5~3}6s`aO delta 56 zcmaFO`I>XWG`7wUT1RpoOq{cciJ4_{0OMjtuFek(KmZcm{Flj_5hy-6fN>@VmuHZ7 Ih^_?}0DnmnQ~&?~ diff --git a/lib/pytz/zoneinfo/Asia/Kamchatka b/lib/pytz/zoneinfo/Asia/Kamchatka index a0541cfa8725e7b5f581034578361976f676f65d..e182161447f5950dd4be8ccfc3ab28e65fda570e 100644 GIT binary patch delta 67 zcmbQsIhS+7G`8voT1RpoOq{cBauB1;)O;U;qM;^yZgLUW`Zz F3IVrp6nOvu delta 67 zcmX@bd5UwwG`1NGT1TorOq{cciHUi#Ba6i31V*08kt~^;?=mtma?M}>0g#H#FPXd; IQPmUz00(3g!2kdN diff --git a/lib/pytz/zoneinfo/Asia/Magadan b/lib/pytz/zoneinfo/Asia/Magadan index e09c4dc2e2fb483baf6e7b131b1ff1a0c16bbfd8..6ece3930bac9834018b5c1a95bc193a1a6ac1783 100644 GIT binary patch delta 159 zcmX@jd6RR3GNbcEl_Ivz4_Zg6K1`f5LpEG`4g&)tGZO@|up&V=_Q`>a%99fqS(rK4 xStdub*fGu8FnKnM`sBNeKyxNDGTr0q{J;PNAj3AlWHMw_0CLDMQIE-x3jmyOA(H?A delta 145 zcmcb~d75*AGNaQ(l_IwI16oI%HcXr|LzaP&nF#_}SdkzbI|~yt>*PR2WhM^x$<>T@ rOg#%G?`HI3behb@bdM|k00R(!%-H;n$&gWoBtz6EUtnQpGT;ILiV7Si diff --git a/lib/pytz/zoneinfo/Asia/Novokuznetsk b/lib/pytz/zoneinfo/Asia/Novokuznetsk index 11768662734db5b34b3ca8929f787e8483d4d0a4..6b978ccb8208ac409beca0cf18201b00c9a7da72 100644 GIT binary patch delta 57 zcmaFB`G9l6G`2$qT1Pe;Oq{c6auB1!+mSU;qM;^yZgLUW`Zz F3IWK(6!8E6 delta 67 zcmX@bd5UwwG`3{|T1Pq=CeGQ!#Kb(=kws#10wd4lNS4gacNv)&xt0kq00BtF=9f%f IjHqe~0nZZ?I{*Lx diff --git a/lib/pytz/zoneinfo/Asia/Oral b/lib/pytz/zoneinfo/Asia/Oral index 1467cafcc983c60e8cfaceeca24b0a1f5b59a3ef..55f526ba79c5e437740a6cb4a24855ca0cd8f97b 100644 GIT binary patch delta 422 zcmX@Z@soXmxF{C`0|N+yfC&(T*a8!EVuinQC1^bPouFB!TN#q5Q8kpF%S=dSzv<82IyZqT~jUqw2U?1 delta 513 zcmey#euiU$xF{b30|N+yfC&(T*isX9V)^T59MJUM;-Goukb%~br3w@0B-A^ZzI9+= zWMXDvW&%Q1HYjB0VCP`u;^gAy!47#D7z|WE=36*0urM&V1%Swq1V&B}i7w(601}6( zWaJ04MM1h4osl>UqCUPM452}eU?Mn#!9U0mLWY2BVqjnd0uTw|!AV>`p1g-~uQbrh oAcY`rgK41GK{UwwljkrUtOv3OG6?*DVL*i--~t9!h^{FY0P1T%U;qFB diff --git a/lib/pytz/zoneinfo/Asia/Qyzylorda b/lib/pytz/zoneinfo/Asia/Qyzylorda index ce53516181ef95768b73034c4dbf806d7d30e9dc..942ed07d05d74226637371b19b8de12a40434f05 100644 GIT binary patch literal 1033 zcmd6lJxo(k7(j0e71}bm$d3hiD*AwmVku3*w$!C@faD8lLP8+v03;+#Mxu!h#xyLl z0SjSh)HoQ1lHjB!42~v#gBvj}EXKq^9U$1~J+BNUOl;hjd(Z8CKSR&SPtMQ9%!v`U zCrn1%PWI=Y+T~5#Qt9o>O7?>LyxiQnA@7}CpAy?SLdqE}N*+7+$nwa8Ds9{#3oAS~{mztX*TSm=4QCOuy? 
z+xzjeM7MY3#;YyqTYVuniz{;LK~ef=7UlMINn`gP>p=RM#>c01uwzCOu@OyFa+-`x zN%CvA4jpD>_(MqV>^hQq?MnKcUouNU8Cls;t%2w;t}E!;uGQ&;uYc+;uqo=;u+!^ z;v3@JqxBAP5ApBOHUQ}W(gLIhNE47QAZ1kwtm7f3UZZams{ApLl>4M94B ev;^r1(iEgCNL!G;AdSJZ-dQMR8_e`2LjM6&l-3sj literal 1082 zcmdVYJ!n%=9ES08(^i|LpcJ)DV)fRlA+6OknviOv#}1;j)~FYyMIne*1O<`$F$fC% z06!+Fi#QZ28N?xqF4Ex)jt)nVE~PlTI8+Cv2K_%r2dyrS-b>E!_J%BdpJ3?Ra7O)c zHOL!2Tv_?>9KR{YlVz+qRx)w;TEswymeZZtSSqO^FY7bM&1}) za+xlZtL9BUF`)DFanrq+);njunq6~^x~Ck|g_oc7?(s#v=fSMjXXf?Z(FN0+ed8-H zR8w0MlDlr%FL!8pxIP?-{{0RsRVc~-LglrpRcc?6GbnlTJ}51HBj2u2>eNMVmHYO$ z=l+9%>N_yxj-!D(JSdNH$fLVFPjJVNzvT@6VXn_VfAvi+W<#dqHS-}8A~QN=N@PxC zQe;+ST4Y{iVq|7yYGiI?a%6U7dSrei0VD$?1*hbIB;k}SoRS8T2N#JTnINekxgg0P z*&yj4`5*})86hblIXNXMBrB(+h2(`KhGd4MhUA7Mhh&GOhvbL-Z&U;qM;^yYU=UW`Zz F$^i=`7Rmqs delta 51 zcmcc0d6jd*G`9EyT1T8VOq{cciHUi00OJ%!uJ{8CKmZcm{Eo?s5hy-6fN|2~Gc23{ D>%kF6 diff --git a/lib/pytz/zoneinfo/Asia/Tomsk b/lib/pytz/zoneinfo/Asia/Tomsk new file mode 100644 index 0000000000000000000000000000000000000000..682fa5e070f15645bba287878acddf6d01e4a4bb GIT binary patch literal 1241 zcmdVZPe{{I0KoCLoNjCqUP^1N%w5AZ)%*v9iD0UAP75{kKTQWVKSvBDmt4EGxe8W9@ z&7*ehzt^JI-iYdTm#cK?X}@0Iv0RrW7jz){Rc{DQ>2lv2z0vzh2c2JJaQ1`T^y$5- zxNu!njJ=eVub;@!)2yt@-j>z3uF1{)=jE1jJyM=%ms^ka>+soQdRtw$-hQw_@9^)_ zk#Imqepc#QpOUqcg?i_FncOuzqoWf}8GGQBvC(N=_k32>U!7OG?~W^Va#A()eY5t& zU#P~eF{`P3K*bw}tiAbtsyT4SYM!}oCGsv=iP0-o%f}8Y`MAq!9l5C5ZpN)t^1D2g zn6rc=Ok%p0u kk(O;u&q&j@rfZ~aq;I5gq;sTo{9k(a#LPdTzPiTq3qC3l=l}o! literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Ust-Nera b/lib/pytz/zoneinfo/Asia/Ust-Nera index 0efacd6beae621991e75418bce2ad3398b8830f7..68b13f07d5604a353a642ffcafeef98b4e5d303f 100644 GIT binary patch delta 59 zcmeC>>gAd+jji*8){&|Y6X$H3oWvrxxqz{ck*o6q0}z0OH*+utF@hu}7clm4a)tRg KhUgj^Z~*|g0u)XF delta 61 zcmeC>>gAd+jV=Cw))A)-6X$GWW?^QV9LT6Vxq+o+^B+bgMy~h+3_t*q-ps)q#E7Jz F7XWye5_tds diff --git a/lib/pytz/zoneinfo/Asia/Vladivostok b/lib/pytz/zoneinfo/Asia/Vladivostok index 156c8e6f5283b8164b63ef3517babf775f355e12..59789ecaa584c9c1ffebe032de28f8bf5308d8c7 100644 GIT binary patch delta 61 zcmX@jd75*=G`1NGT1Pq=CeGP3IfzkdasngE);vU;qM;^yZgLUW`Zz F3IUYJ6Nvx- delta 67 zcmX@bd5UwwG`8voT1Vn8Oq{cciHUi#Ba6i31V*08kt~^;?=mtma#cTI00NMT%`cg} J7*W*}0suHF70&T*leaQj pb1*P40s#ZVwHm#0&c;PiK^yJfD$+k#}+oYw+aV ztmaINjFW?y)KD}AfHZFIWb$O>25JRz7(m)L-(`+q#56#HkrAW<32?c(hUgk`0RXZ% BA0Pk# delta 152 zcmaFI-N!RQTu_jKfdPa;z#oVOCu%HYzj7}?>&QWei5K=W@$yZc$f&}|KiP*hc=BXc zXEp`~Mn(pP$xcjaD0%}xdN)@xc`|YXb%RU;nXvgRa|9!X88Q$vK#Gt6m#b@tt`Qdi Do4*{+ diff --git a/lib/pytz/zoneinfo/Europe/Kirov b/lib/pytz/zoneinfo/Europe/Kirov new file mode 100644 index 0000000000000000000000000000000000000000..2ddf3ceba6953f2256d98b0e29f60e05d4944e1a GIT binary patch literal 1153 zcmd7QJ!lhQ9LMn|ZPJ9q!NoSVNqwp5(O1u;DN)lJP213#LXqH56^btfLGVF3C^!_X zom5Z+6-5+-i?4%DRte%Fww>In@PLyq$bS*U;>+_r+sQ%PyyNZ@j@)v;Z)x=KcvAgw zgzOV8j-0*ty>imtZYn4AxEvW-^5J+skvhjIrTtqoVcOggXi_8eKV3gc1cn_*QIOM2}#E%r8~Jty1(z2Ov7=V zdD|zO7f1D$r_Hi;A*-{u^Sb9rqwbwf=xtY(&W#84_Nkcc=zp*C2YyIj;+5{t8yN`P z(}kwzQuzE*2CK_5IDbcm-c8HS`)8#%drJ?WJ1V7Od`$VOtA3yUz|vV0tO@$L{r&U@ zmFm1~KcQ-Mm0ziW5ug34nVed&d|!UzvrqU>ak!+~+?2i2_UhtQ<+F$P!`@2&u-(_C zmwwuV4Urvr%$CTW$flllS7cjcUu0usXJl(+Z)9_1cVv5Hf20AV1Ed9{2c!w43s2hy z(g)HA(h1TE(hJfI(hbrM(ht%Q(h<@U(i75@r|k-9%hUFSG=_ABw1)JCG>3GDw1@PE tG>CMFw21VGG>LSHwCQR4L>l$9og%Fwy&}yb-Qxe)Za8bp?(Im0e*t_y1*8A~ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Europe/Minsk b/lib/pytz/zoneinfo/Europe/Minsk index 28ef30a325b6bf018a9dbe68bc9f45cff080141b..241b14977ec3cae744e6e3412bd480184c3930df 100644 GIT binary 
patch delta 21 bcmcb?b%SfdDyE+e8`sQZ0#ciOn0c50a9jxj delta 21 bcmcb?b%SfdDyF*$8`sQZ0#ciOn0c50Yp@84 diff --git a/lib/pytz/zoneinfo/Europe/Moscow b/lib/pytz/zoneinfo/Europe/Moscow index bdbbaebe05e0d7dba96718b6448ca13d9a6f7300..7df786248b9eebe65376d9b3697b25ae2b7bd1a2 100644 GIT binary patch delta 65 zcmeyt{eyeLF}7a?T1Q?NOgyu1@+3yN$qN`cHg8}QV&eK$zyJgwsm*7Y!x>?clQ%G~ NVCV7;_SQA#0sx;B7?J=0 delta 71 zcmeyt{eyeLF}4d1T1Pe;OgyuXiH&u#A*;mX1&lnKH!un@ab0j=00NNg<}=LUj7aj6 QH!!YX=kg8q)-~n=0FQSTN&o-= diff --git a/lib/pytz/zoneinfo/Europe/Samara b/lib/pytz/zoneinfo/Europe/Samara index 389f69b7cff7a80bd276574a2048916632452b55..cac6058a1c25abb6bd33e205b208565aca21b746 100644 GIT binary patch delta 204 zcmcb_b%<+%xFA0R0|N+yfD;h&Pt+)4dy}AbSRx*M_g|bK(>I4-fYV3!bqawQb5B&`jG%vu%mB?t_c?a1y&u7 delta 208 zcmX@ab%|?&xS#+70|N+yfD;f4Ow=f1J7l1BWU0c$1sl|uc=-rHer~?W(^�KV)%d yWng4vWSGpwq=sqkWJjh)T!#!m_J9oEtjp}eNTTsFVBP|a diff --git a/lib/pytz/zoneinfo/Europe/Ulyanovsk b/lib/pytz/zoneinfo/Europe/Ulyanovsk index dbcab7369f6a12a5f77b6db87979797dfd5fe493..ac0586d133955bcb9407789a89f5fff2fca749cd 100644 GIT binary patch delta 101 zcmey&`I&RVG`3#_T1Q?NOq{cAauTE1HJ59GxF8<`0|N+yfD;h&P1Go2dR;Ja&L#~mZek!0_vBs{J249fMo9(+EdvHw z1_lcU5D8*WzQ8Cg;=2m}yf@=r!pOgBu{XL?clQ%G~ NVCV7;_SQA#0sx;B7?J=0 delta 71 zcmeyt{eyeLF}4d1T1Pe;OgyuXiH&u#A*;mX1&lnKH!un@ab0j=00NNg<}=LUj7aj6 QH!!YX=kg8q)-~n=0FQSTN&o-= diff --git a/lib/pytz/zoneinfo/zone.tab b/lib/pytz/zoneinfo/zone.tab index 98ee87f178..bf1bb71c62 100644 --- a/lib/pytz/zoneinfo/zone.tab +++ b/lib/pytz/zoneinfo/zone.tab @@ -329,14 +329,16 @@ RS +4450+02030 Europe/Belgrade RU +5443+02030 Europe/Kaliningrad MSK-01 - Kaliningrad RU +554521+0373704 Europe/Moscow MSK+00 - Moscow area RU +4457+03406 Europe/Simferopol MSK+00 - Crimea -RU +4844+04425 Europe/Volgograd MSK+00 - Volgograd, Kirov, Saratov +RU +4844+04425 Europe/Volgograd MSK+00 - Volgograd, Saratov +RU +5836+04939 Europe/Kirov MSK+00 - Kirov RU +4621+04803 Europe/Astrakhan MSK+01 - Astrakhan RU +5312+05009 Europe/Samara MSK+01 - Samara, Udmurtia RU +5420+04824 Europe/Ulyanovsk MSK+01 - Ulyanovsk RU +5651+06036 Asia/Yekaterinburg MSK+02 - Urals RU +5500+07324 Asia/Omsk MSK+03 - Omsk -RU +5502+08255 Asia/Novosibirsk MSK+03 - Novosibirsk, Tomsk +RU +5502+08255 Asia/Novosibirsk MSK+03 - Novosibirsk RU +5322+08345 Asia/Barnaul MSK+04 - Altai +RU +5630+08458 Asia/Tomsk MSK+04 - Tomsk RU +5345+08707 Asia/Novokuznetsk MSK+04 - Kemerovo RU +5601+09250 Asia/Krasnoyarsk MSK+04 - Krasnoyarsk area RU +5216+10420 Asia/Irkutsk MSK+05 - Irkutsk, Buryatia @@ -345,7 +347,7 @@ RU +6200+12940 Asia/Yakutsk MSK+06 - Lena River RU +623923+1353314 Asia/Khandyga MSK+06 - Tomponsky, Ust-Maysky RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky -RU +5934+15048 Asia/Magadan MSK+07 - Magadan +RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka diff --git a/lib/pytz/zoneinfo/zone1970.tab b/lib/pytz/zoneinfo/zone1970.tab index e2be4e7161..49f0b0b60a 100644 --- a/lib/pytz/zoneinfo/zone1970.tab +++ b/lib/pytz/zoneinfo/zone1970.tab @@ -286,14 +286,16 @@ RS,BA,HR,ME,MK,SI +4450+02030 Europe/Belgrade RU +5443+02030 Europe/Kaliningrad MSK-01 - Kaliningrad RU +554521+0373704 Europe/Moscow MSK+00 - Moscow area RU +4457+03406 Europe/Simferopol MSK+00 - Crimea -RU +4844+04425 Europe/Volgograd MSK+00 - Volgograd, Kirov, Saratov +RU +4844+04425 Europe/Volgograd 
MSK+00 - Volgograd, Saratov +RU +5836+04939 Europe/Kirov MSK+00 - Kirov RU +4621+04803 Europe/Astrakhan MSK+01 - Astrakhan RU +5312+05009 Europe/Samara MSK+01 - Samara, Udmurtia RU +5420+04824 Europe/Ulyanovsk MSK+01 - Ulyanovsk RU +5651+06036 Asia/Yekaterinburg MSK+02 - Urals RU +5500+07324 Asia/Omsk MSK+03 - Omsk -RU +5502+08255 Asia/Novosibirsk MSK+03 - Novosibirsk, Tomsk +RU +5502+08255 Asia/Novosibirsk MSK+03 - Novosibirsk RU +5322+08345 Asia/Barnaul MSK+04 - Altai +RU +5630+08458 Asia/Tomsk MSK+04 - Tomsk RU +5345+08707 Asia/Novokuznetsk MSK+04 - Kemerovo RU +5601+09250 Asia/Krasnoyarsk MSK+04 - Krasnoyarsk area RU +5216+10420 Asia/Irkutsk MSK+05 - Irkutsk, Buryatia @@ -302,7 +304,7 @@ RU +6200+12940 Asia/Yakutsk MSK+06 - Lena River RU +623923+1353314 Asia/Khandyga MSK+06 - Tomponsky, Ust-Maysky RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky -RU +5934+15048 Asia/Magadan MSK+07 - Magadan +RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka diff --git a/lib/rarfile/__init__.py b/lib/rarfile.py similarity index 96% rename from lib/rarfile/__init__.py rename to lib/rarfile.py index 28d24f6235..25b61196a6 100644 --- a/lib/rarfile/__init__.py +++ b/lib/rarfile.py @@ -74,7 +74,7 @@ """ -__version__ = '2.7' +__version__ = '2.8' # export only interesting items __all__ = ['is_rarfile', 'RarInfo', 'RarFile', 'RarExtFile'] @@ -97,7 +97,7 @@ try: from cryptography.hazmat.primitives.ciphers import algorithms, modes, Cipher from cryptography.hazmat.backends import default_backend - class AES_CBC_Decrypt: + class AES_CBC_Decrypt(object): block_size = 16 def __init__(self, key, iv): ciph = Cipher(algorithms.AES(key), modes.CBC(iv), default_backend()) @@ -106,7 +106,7 @@ def decrypt(self, data): return self.dec.update(data) except ImportError: from Crypto.Cipher import AES - class AES_CBC_Decrypt: + class AES_CBC_Decrypt(object): block_size = 16 def __init__(self, key, iv): self.dec = AES.new(key, AES.MODE_CBC, iv) @@ -508,7 +508,7 @@ def needs_password(self): def namelist(self): '''Return list of filenames in archive.''' - return [f.filename for f in self._info_list] + return [f.filename for f in self.infolist()] def infolist(self): '''Return RarInfo objects for all files/directories in archive.''' @@ -636,7 +636,7 @@ def close(self): def printdir(self): """Print archive file list to stdout.""" - for f in self._info_list: + for f in self.infolist(): print(f.filename) def extract(self, member, path=None, pwd=None): @@ -683,10 +683,22 @@ def testrar(self): """ cmd = [UNRAR_TOOL] + list(TEST_ARGS) add_password_arg(cmd, self._password) - cmd.append(self.rarfile) - p = custom_popen(cmd) - output = p.communicate()[0] - check_returncode(p, output) + cmd.append('--') + + if is_filelike(self.rarfile): + tmpname = membuf_tempfile(self.rarfile) + cmd.append(tmpname) + else: + tmpname = None + cmd.append(self.rarfile) + + try: + p = custom_popen(cmd) + output = p.communicate()[0] + check_returncode(p, output) + finally: + if tmpname: + os.unlink(tmpname) def strerror(self): """Return error string if parsing failed, @@ -758,7 +770,7 @@ def _parse_real(self): if id != RAR_ID: if isinstance(self.rarfile, (str, unicode)): raise NotRarFile("Not a Rar archive: {}".format(self.rarfile)) - raise NonRarFile("Not a Rar archive") + raise NotRarFile("Not a Rar archive") volume = 0 # first vol (.rar) is 0 
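As an illustration of the rarfile 2.8 behaviour introduced in this hunk (not part of the patch itself), a minimal sketch of handing the archive over as a file-like object; 'example.rar' is a hypothetical path and testrar() assumes an unrar binary on PATH:

    import io
    import rarfile

    # 'example.rar' is a hypothetical archive, read fully into memory.
    with open('example.rar', 'rb') as fh:
        buf = io.BytesIO(fh.read())

    rf = rarfile.RarFile(buf)   # archive passed as a file-like object, not a path
    print(rf.namelist())        # namelist() now delegates to infolist(), as changed above
    rf.testrar()                # unrar runs against a temp copy written by membuf_tempfile()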
more_vols = 0 @@ -1149,7 +1161,7 @@ def _read_comment_v3(self, inf, psw=None): if self._crc_check: crc = crc32(cmt) if crc < 0: - crc += (long(1) << 32) + crc += (1 << 32) if crc != inf.CRC: return None @@ -1157,23 +1169,7 @@ def _read_comment_v3(self, inf, psw=None): # write in-memory archive to temp file - needed for solid archives def _open_unrar_membuf(self, memfile, inf, psw): - memfile.seek(0, 0) - - tmpfd, tmpname = mkstemp(suffix='.rar') - tmpf = os.fdopen(tmpfd, "wb") - - try: - BSIZE = 32*1024 - while True: - buf = memfile.read(BSIZE) - if not buf: - break - tmpf.write(buf) - tmpf.close() - except: - tmpf.close() - os.unlink(tmpname) - raise + tmpname = membuf_tempfile(memfile) return self._open_unrar(tmpname, inf, psw, tmpname) # extract using unrar @@ -1215,9 +1211,15 @@ def _extract(self, fnlist, path=None, psw=None): # pasoword psw = psw or self._password add_password_arg(cmd, psw) + cmd.append('--') # rar file - cmd.append(self.rarfile) + if is_filelike(self.rarfile): + tmpname = membuf_tempfile(self.rarfile) + cmd.append(tmpname) + else: + tmpname = None + cmd.append(self.rarfile) # file list for fn in fnlist: @@ -1230,15 +1232,19 @@ def _extract(self, fnlist, path=None, psw=None): cmd.append(path + os.sep) # call - p = custom_popen(cmd) - output = p.communicate()[0] - check_returncode(p, output) + try: + p = custom_popen(cmd) + output = p.communicate()[0] + check_returncode(p, output) + finally: + if tmpname: + os.unlink(tmpname) ## ## Utility classes ## -class UnicodeFilename: +class UnicodeFilename(object): """Handle unicode filename decompression""" def __init__(self, name, encdata): @@ -1315,7 +1321,7 @@ class RarExtFile(RawIOBase): name = None def __init__(self, rf, inf): - RawIOBase.__init__(self) + super(RarExtFile, self).__init__() # standard io.* properties self.name = inf.filename @@ -1373,7 +1379,7 @@ def _check(self): raise BadRarFile("Failed the read enough data") crc = self.CRC if crc < 0: - crc += (long(1) << 32) + crc += (1 << 32) if crc != self.inf.CRC: raise BadRarFile("Corrupt file - CRC check failed: " + self.inf.filename) @@ -1383,7 +1389,7 @@ def _read(self, cnt): def close(self): """Close open resources.""" - RawIOBase.close(self) + super(RarExtFile, self).close() if self.fd: self.fd.close() @@ -1496,7 +1502,7 @@ def __init__(self, rf, inf, cmd, tempfile=None): self.cmd = cmd self.proc = None self.tempfile = tempfile - RarExtFile.__init__(self, rf, inf) + super(PipeReader, self).__init__(rf, inf) def _close_proc(self): if not self.proc: @@ -1512,7 +1518,7 @@ def _close_proc(self): self.proc = None def _open(self): - RarExtFile._open(self) + super(PipeReader, self)._open() # stop old process self._close_proc() @@ -1549,7 +1555,7 @@ def close(self): """Close open resources.""" self._close_proc() - RarExtFile.close(self) + super(PipeReader, self).close() if self.tempfile: try: @@ -1580,7 +1586,7 @@ class DirectReader(RarExtFile): """Read uncompressed data directly from archive.""" def _open(self): - RarExtFile._open(self) + super(DirectReader, self)._open() self.volfile = self.inf.volume_file self.fd = XFile(self.volfile, 0) @@ -1694,7 +1700,7 @@ def readinto(self, buf): return got -class HeaderDecrypt: +class HeaderDecrypt(object): """File-like object that decrypts from another file""" def __init__(self, f, key, iv): self.f = f @@ -1900,8 +1906,7 @@ def custom_popen(cmd): p = Popen(cmd, bufsize = 0, stdout = PIPE, stdin = PIPE, stderr = STDOUT, creationflags = creationflags) - except OSError: - ex = sys.exc_info()[1] + except OSError as ex: if ex.errno 
== errno.ENOENT: raise RarCannotExec("Unrar not installed? (rarfile.UNRAR_TOOL=%r)" % UNRAR_TOOL) raise @@ -1955,6 +1960,26 @@ def check_returncode(p, out): raise exc(msg) +def membuf_tempfile(memfile): + memfile.seek(0, 0) + + tmpfd, tmpname = mkstemp(suffix='.rar') + tmpf = os.fdopen(tmpfd, "wb") + + try: + BSIZE = 32*1024 + while True: + buf = memfile.read(BSIZE) + if not buf: + break + tmpf.write(buf) + tmpf.close() + return tmpname + except: + tmpf.close() + os.unlink(tmpname) + raise + # # Check if unrar works # diff --git a/lib/rarfile/dumprar.py b/lib/rarfile/dumprar.py deleted file mode 100644 index f7ab062b0c..0000000000 --- a/lib/rarfile/dumprar.py +++ /dev/null @@ -1,361 +0,0 @@ -#! /usr/bin/env python - -"""Dump archive contents, test extraction.""" - -import io -import sys -import rarfile as rf -from binascii import crc32, hexlify -from datetime import datetime - -try: - bytearray -except NameError: - import array - def bytearray(v): - return array.array('B', v) - -rf.UNICODE_COMMENTS = 1 -rf.USE_DATETIME = 1 - -usage = """ -dumprar [switches] [ARC1 ARC2 ...] [@ARCLIST] -switches: - @file read archive names from file - -pPSW set password - -Ccharset set fallback charset - -v increase verbosity - -t attempt to read all files - -x write read files out - -c show archive comment - -h show usage - -- stop switch parsing -""".strip() - -os_list = ['DOS', 'OS2', 'WIN', 'UNIX', 'MACOS', 'BEOS'] - -block_strs = ['MARK', 'MAIN', 'FILE', 'OLD_COMMENT', 'OLD_EXTRA', - 'OLD_SUB', 'OLD_RECOVERY', 'OLD_AUTH', 'SUB', 'ENDARC'] - -def rarType(type): - if type < rf.RAR_BLOCK_MARK or type > rf.RAR_BLOCK_ENDARC: - return "*UNKNOWN*" - return block_strs[type - rf.RAR_BLOCK_MARK] - -main_bits = ( - (rf.RAR_MAIN_VOLUME, "VOL"), - (rf.RAR_MAIN_COMMENT, "COMMENT"), - (rf.RAR_MAIN_LOCK, "LOCK"), - (rf.RAR_MAIN_SOLID, "SOLID"), - (rf.RAR_MAIN_NEWNUMBERING, "NEWNR"), - (rf.RAR_MAIN_AUTH, "AUTH"), - (rf.RAR_MAIN_RECOVERY, "RECOVERY"), - (rf.RAR_MAIN_PASSWORD, "PASSWORD"), - (rf.RAR_MAIN_FIRSTVOLUME, "FIRSTVOL"), - (rf.RAR_SKIP_IF_UNKNOWN, "SKIP"), - (rf.RAR_LONG_BLOCK, "LONG"), -) - -endarc_bits = ( - (rf.RAR_ENDARC_NEXT_VOLUME, "NEXTVOL"), - (rf.RAR_ENDARC_DATACRC, "DATACRC"), - (rf.RAR_ENDARC_REVSPACE, "REVSPACE"), - (rf.RAR_ENDARC_VOLNR, "VOLNR"), - (rf.RAR_SKIP_IF_UNKNOWN, "SKIP"), - (rf.RAR_LONG_BLOCK, "LONG"), -) - -file_bits = ( - (rf.RAR_FILE_SPLIT_BEFORE, "SPLIT_BEFORE"), - (rf.RAR_FILE_SPLIT_AFTER, "SPLIT_AFTER"), - (rf.RAR_FILE_PASSWORD, "PASSWORD"), - (rf.RAR_FILE_COMMENT, "COMMENT"), - (rf.RAR_FILE_SOLID, "SOLID"), - (rf.RAR_FILE_LARGE, "LARGE"), - (rf.RAR_FILE_UNICODE, "UNICODE"), - (rf.RAR_FILE_SALT, "SALT"), - (rf.RAR_FILE_VERSION, "VERSION"), - (rf.RAR_FILE_EXTTIME, "EXTTIME"), - (rf.RAR_FILE_EXTFLAGS, "EXTFLAGS"), - (rf.RAR_SKIP_IF_UNKNOWN, "SKIP"), - (rf.RAR_LONG_BLOCK, "LONG"), -) - -generic_bits = ( - (rf.RAR_SKIP_IF_UNKNOWN, "SKIP"), - (rf.RAR_LONG_BLOCK, "LONG"), -) - -file_parms = ("D64", "D128", "D256", "D512", - "D1024", "D2048", "D4096", "DIR") - -def xprint(m, *args): - if sys.hexversion < 0x3000000: - m = m.decode('utf8') - if args: - m = m % args - if sys.hexversion < 0x3000000: - m = m.encode('utf8') - sys.stdout.write(m) - sys.stdout.write('\n') - -def render_flags(flags, bit_list): - res = [] - known = 0 - for bit in bit_list: - known = known | bit[0] - if flags & bit[0]: - res.append(bit[1]) - unknown = flags & ~known - n = 0 - while unknown: - if unknown & 1: - res.append("UNK_%04x" % (1 << n)) - unknown = unknown >> 1 - n += 1 - - return ",".join(res) - -def 
get_file_flags(flags): - res = render_flags(flags & ~rf.RAR_FILE_DICTMASK, file_bits) - - xf = (flags & rf.RAR_FILE_DICTMASK) >> 5 - res += "," + file_parms[xf] - return res - -def get_main_flags(flags): - return render_flags(flags, main_bits) - -def get_endarc_flags(flags): - return render_flags(flags, endarc_bits) - -def get_generic_flags(flags): - return render_flags(flags, generic_bits) - -def fmt_time(t): - if isinstance(t, datetime): - return t.isoformat(' ') - return "%04d-%02d-%02d %02d:%02d:%02d" % t - -def show_item(h): - st = rarType(h.type) - unknown = h.header_size - h.header_base - xprint("%s: hdrlen=%d datlen=%d hdr_unknown=%d", st, h.header_size, - h.add_size, unknown) - if unknown > 0 and cf_verbose > 1: - dat = h.header_data[h.header_base : ] - xprint(" unknown: %s", hexlify(dat)) - if h.type in (rf.RAR_BLOCK_FILE, rf.RAR_BLOCK_SUB): - if h.host_os == rf.RAR_OS_UNIX: - s_mode = "0%o" % h.mode - else: - s_mode = "0x%x" % h.mode - xprint(" flags=0x%04x:%s", h.flags, get_file_flags(h.flags)) - if h.host_os >= 0 and h.host_os < len(os_list): - s_os = os_list[h.host_os] - else: - s_os = "?" - xprint(" os=%d:%s ver=%d mode=%s meth=%c cmp=%d dec=%d vol=%d", - h.host_os, s_os, - h.extract_version, s_mode, h.compress_type, - h.compress_size, h.file_size, h.volume) - ucrc = (h.CRC + (1 << 32)) & ((1 << 32) - 1) - xprint(" crc=0x%08x (%d) time=%s", ucrc, h.CRC, fmt_time(h.date_time)) - xprint(" name=%s", h.filename) - if h.mtime: - xprint(" mtime=%s", fmt_time(h.mtime)) - if h.ctime: - xprint(" ctime=%s", fmt_time(h.ctime)) - if h.atime: - xprint(" atime=%s", fmt_time(h.atime)) - if h.arctime: - xprint(" arctime=%s", fmt_time(h.arctime)) - elif h.type == rf.RAR_BLOCK_MAIN: - xprint(" flags=0x%04x:%s", h.flags, get_main_flags(h.flags)) - elif h.type == rf.RAR_BLOCK_ENDARC: - xprint(" flags=0x%04x:%s", h.flags, get_endarc_flags(h.flags)) - elif h.type == rf.RAR_BLOCK_MARK: - xprint(" flags=0x%04x:", h.flags) - else: - xprint(" flags=0x%04x:%s", h.flags, get_generic_flags(h.flags)) - - if h.comment is not None: - cm = repr(h.comment) - if cm[0] == 'u': - cm = cm[1:] - xprint(" comment=%s", cm) - -cf_show_comment = 0 -cf_verbose = 0 -cf_charset = None -cf_extract = 0 -cf_test_read = 0 -cf_test_unrar = 0 -cf_test_memory = 0 - -def check_crc(f, inf): - ucrc = f.CRC - if ucrc < 0: - ucrc += (long(1) << 32) - if ucrc != inf.CRC: - print ('crc error') - -def test_read_long(r, inf): - f = r.open(inf.filename) - total = 0 - while 1: - data = f.read(8192) - if not data: - break - total += len(data) - if total != inf.file_size: - xprint("\n *** %s has corrupt file: %s ***", r.rarfile, inf.filename) - xprint(" *** short read: got=%d, need=%d ***\n", total, inf.file_size) - check_crc(f, inf) - - # test .seek() & .readinto() - if cf_test_read > 1: - f.seek(0,0) - - # hack: re-enable crc calc - f.crc_check = 1 - f.CRC = 0 - - total = 0 - buf = bytearray(rf.ZERO*4096) - while 1: - res = f.readinto(buf) - if not res: - break - total += res - if inf.file_size != total: - xprint(" *** readinto failed: got=%d, need=%d ***\n", total, inf.file_size) - check_crc(f, inf) - f.close() - -def test_read(r, inf): - test_read_long(r, inf) - - -def test_real(fn, psw): - xprint("Archive: %s", fn) - - cb = None - if cf_verbose > 1: - cb = show_item - - rfarg = fn - if cf_test_memory: - rfarg = io.BytesIO(open(fn, 'rb').read()) - - # check if rar - if not rf.is_rarfile(rfarg): - xprint(" --- %s is not a RAR file ---", fn) - return - - # open - r = rf.RarFile(rfarg, charset = cf_charset, info_callback = cb) - # set 
password - if r.needs_password(): - if psw: - r.setpassword(psw) - else: - xprint(" --- %s requires password ---", fn) - return - - # show comment - if cf_show_comment and r.comment: - for ln in r.comment.split('\n'): - xprint(" %s", ln) - elif cf_verbose == 1 and r.comment: - cm = repr(r.comment) - if cm[0] == 'u': - cm = cm[1:] - xprint(" comment=%s", cm) - - # process - for n in r.namelist(): - inf = r.getinfo(n) - if inf.isdir(): - continue - if cf_verbose == 1: - show_item(inf) - if cf_test_read: - test_read(r, inf) - - if cf_extract: - r.extractall() - for inf in r.infolist(): - r.extract(inf) - - if cf_test_unrar: - r.testrar() - -def test(fn, psw): - try: - test_real(fn, psw) - except rf.NeedFirstVolume: - xprint(" --- %s is middle part of multi-vol archive ---", fn) - except rf.Error: - exc, msg, tb = sys.exc_info() - xprint("\n *** %s: %s ***\n", exc.__name__, msg) - del tb - except IOError: - exc, msg, tb = sys.exc_info() - xprint("\n *** %s: %s ***\n", exc.__name__, msg) - del tb - -def main(): - global cf_verbose, cf_show_comment, cf_charset - global cf_extract, cf_test_read, cf_test_unrar - global cf_test_memory - - # parse args - args = [] - psw = None - noswitch = False - for a in sys.argv[1:]: - if noswitch: - args.append(a) - elif a[0] == "@": - for ln in open(a[1:], 'r'): - fn = ln[:-1] - args.append(fn) - elif a[0] != '-': - args.append(a) - elif a[1] == 'p': - psw = a[2:] - elif a == '--': - noswitch = True - elif a == '-h': - xprint(usage) - return - elif a == '-v': - cf_verbose += 1 - elif a == '-c': - cf_show_comment = 1 - elif a == '-x': - cf_extract = 1 - elif a == '-t': - cf_test_read += 1 - elif a == '-T': - cf_test_unrar = 1 - elif a == '-M': - cf_test_memory = 1 - elif a[1] == 'C': - cf_charset = a[2:] - else: - raise Exception("unknown switch: "+a) - if not args: - xprint(usage) - - for fn in args: - test(fn, psw) - - -if __name__ == '__main__': - try: - main() - except KeyboardInterrupt: - pass - diff --git a/lib/requests/__init__.py b/lib/requests/__init__.py index 0091b387c8..82c0f78074 100644 --- a/lib/requests/__init__.py +++ b/lib/requests/__init__.py @@ -36,14 +36,14 @@ The other HTTP methods are supported - see `requests.api`. Full documentation is at . -:copyright: (c) 2015 by Kenneth Reitz. +:copyright: (c) 2016 by Kenneth Reitz. :license: Apache 2.0, see LICENSE for more details. """ __title__ = 'requests' -__version__ = '2.9.1' -__build__ = 0x020901 +__version__ = '2.10.0' +__build__ = 0x021000 __author__ = 'Kenneth Reitz' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016 Kenneth Reitz' @@ -55,6 +55,12 @@ except ImportError: pass +import warnings + +# urllib3's DependencyWarnings should be silenced. +from .packages.urllib3.exceptions import DependencyWarning +warnings.simplefilter('ignore', DependencyWarning) + from . 
import utils from .models import Request, Response, PreparedRequest from .api import request, get, head, post, patch, put, delete, options diff --git a/lib/requests/adapters.py b/lib/requests/adapters.py index 4f2b23cf03..23e448f42e 100644 --- a/lib/requests/adapters.py +++ b/lib/requests/adapters.py @@ -19,7 +19,7 @@ from .compat import urlparse, basestring from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, prepend_scheme_if_needed, get_auth_from_url, urldefragauth, - select_proxy) + select_proxy, to_native_string) from .structures import CaseInsensitiveDict from .packages.urllib3.exceptions import ClosedPoolError from .packages.urllib3.exceptions import ConnectTimeoutError @@ -33,9 +33,15 @@ from .packages.urllib3.exceptions import ResponseError from .cookies import extract_cookies_to_jar from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, - ProxyError, RetryError) + ProxyError, RetryError, InvalidSchema) from .auth import _basic_auth_str +try: + from .packages.urllib3.contrib.socks import SOCKSProxyManager +except ImportError: + def SOCKSProxyManager(*args, **kwargs): + raise InvalidSchema("Missing dependencies for SOCKS support.") + DEFAULT_POOLBLOCK = False DEFAULT_POOLSIZE = 10 DEFAULT_RETRIES = 0 @@ -149,9 +155,22 @@ def proxy_manager_for(self, proxy, **proxy_kwargs): :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. :returns: ProxyManager """ - if not proxy in self.proxy_manager: + if proxy in self.proxy_manager: + manager = self.proxy_manager[proxy] + elif proxy.lower().startswith('socks'): + username, password = get_auth_from_url(proxy) + manager = self.proxy_manager[proxy] = SOCKSProxyManager( + proxy, + username=username, + password=password, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs + ) + else: proxy_headers = self.proxy_headers(proxy) - self.proxy_manager[proxy] = proxy_from_url( + manager = self.proxy_manager[proxy] = proxy_from_url( proxy, proxy_headers=proxy_headers, num_pools=self._pool_connections, @@ -159,7 +178,7 @@ def proxy_manager_for(self, proxy, **proxy_kwargs): block=self._pool_block, **proxy_kwargs) - return self.proxy_manager[proxy] + return manager def cert_verify(self, conn, url, verify, cert): """Verify a SSL certificate. This method should not be called from user @@ -264,10 +283,12 @@ def get_connection(self, url, proxies=None): def close(self): """Disposes of any internal state. - Currently, this just closes the PoolManager, which closes pooled - connections. + Currently, this closes the PoolManager and any active ProxyManager, + which closes any pooled connections. """ self.poolmanager.clear() + for proxy in self.proxy_manager.values(): + proxy.clear() def request_url(self, request, proxies): """Obtain the url to use when making the final request. 
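The SOCKSProxyManager wiring above is what lets a proxies entry with a socks scheme work end to end. A hedged usage sketch, assuming the optional PySocks dependency is installed and a SOCKS5 proxy is reachable at the hypothetical address below (without PySocks the adapter raises InvalidSchema("Missing dependencies for SOCKS support.")):

    import requests

    proxies = {
        'http': 'socks5://user:pass@127.0.0.1:1080',
        'https': 'socks5://user:pass@127.0.0.1:1080',
    }
    # For these proxy URLs the adapter builds a SOCKSProxyManager instead of proxy_from_url().
    resp = requests.get('https://httpbin.org/ip', proxies=proxies, timeout=10)
    print(resp.json())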
@@ -284,10 +305,16 @@ def request_url(self, request, proxies): """ proxy = select_proxy(request.url, proxies) scheme = urlparse(request.url).scheme - if proxy and scheme != 'https': + + is_proxied_http_request = (proxy and scheme != 'https') + using_socks_proxy = False + if proxy: + proxy_scheme = urlparse(proxy).scheme.lower() + using_socks_proxy = proxy_scheme.startswith('socks') + + url = request.path_url + if is_proxied_http_request and not using_socks_proxy: url = urldefragauth(request.url) - else: - url = request.path_url return url @@ -434,6 +461,9 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox if isinstance(e.reason, ResponseError): raise RetryError(e, request=request) + if isinstance(e.reason, _ProxyError): + raise ProxyError(e, request=request) + raise ConnectionError(e, request=request) except ClosedPoolError as e: diff --git a/lib/requests/api.py b/lib/requests/api.py index b21a1a4fa7..c2068d0eda 100644 --- a/lib/requests/api.py +++ b/lib/requests/api.py @@ -24,7 +24,11 @@ def request(method, url, **kwargs): :param json: (optional) json data to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. - :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read diff --git a/lib/requests/auth.py b/lib/requests/auth.py index edf4c8dcd7..73f8e9da8d 100644 --- a/lib/requests/auth.py +++ b/lib/requests/auth.py @@ -93,6 +93,7 @@ def build_digest_header(self, method, url): qop = self._thread_local.chal.get('qop') algorithm = self._thread_local.chal.get('algorithm') opaque = self._thread_local.chal.get('opaque') + hash_utf8 = None if algorithm is None: _algorithm = 'MD5' diff --git a/lib/requests/models.py b/lib/requests/models.py index 4bcbc5484a..fe4bec1bd3 100644 --- a/lib/requests/models.py +++ b/lib/requests/models.py @@ -103,8 +103,10 @@ def _encode_files(files, data): """Build the body for a multipart/form-data request. Will successfully encode files when passed as a dict or a list of - 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary + tuples. Order is retained if data is a list of tuples but arbitrary if parameters are supplied as a dict. + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). 
""" if (not files): @@ -463,9 +465,11 @@ def prepare_body(self, data, files, json=None): def prepare_content_length(self, body): if hasattr(body, 'seek') and hasattr(body, 'tell'): + curr_pos = body.tell() body.seek(0, 2) - self.headers['Content-Length'] = builtin_str(body.tell()) - body.seek(0, 0) + end_pos = body.tell() + self.headers['Content-Length'] = builtin_str(max(0, end_pos - curr_pos)) + body.seek(curr_pos, 0) elif body is not None: l = super_len(body) if l: diff --git a/lib/requests/packages/README.rst b/lib/requests/packages/README.rst deleted file mode 100644 index 83e0c6258d..0000000000 --- a/lib/requests/packages/README.rst +++ /dev/null @@ -1,11 +0,0 @@ -If you are planning to submit a pull request to requests with any changes in -this library do not go any further. These are independent libraries which we -vendor into requests. Any changes necessary to these libraries must be made in -them and submitted as separate pull requests to those libraries. - -urllib3 pull requests go here: https://github.com/shazow/urllib3 - -chardet pull requests go here: https://github.com/chardet/chardet - -See https://github.com/kennethreitz/requests/pull/1812#issuecomment-30854316 -for the reasoning behind this. diff --git a/lib/requests/packages/urllib3/__init__.py b/lib/requests/packages/urllib3/__init__.py index e43991a974..73668991fd 100644 --- a/lib/requests/packages/urllib3/__init__.py +++ b/lib/requests/packages/urllib3/__init__.py @@ -32,7 +32,7 @@ def emit(self, record): __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' __license__ = 'MIT' -__version__ = '1.13.1' +__version__ = '1.15.1' __all__ = ( 'HTTPConnectionPool', @@ -68,22 +68,25 @@ def add_stderr_logger(level=logging.DEBUG): handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) logger.addHandler(handler) logger.setLevel(level) - logger.debug('Added a stderr logging handler to logger: %s' % __name__) + logger.debug('Added a stderr logging handler to logger: %s', __name__) return handler # ... Clean up. del NullHandler +# All warning filters *must* be appended unless you're really certain that they +# shouldn't be: otherwise, it's very hard for users to use most Python +# mechanisms to silence them. # SecurityWarning's always go off by default. warnings.simplefilter('always', exceptions.SecurityWarning, append=True) # SubjectAltNameWarning's should go off once per host -warnings.simplefilter('default', exceptions.SubjectAltNameWarning) +warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True) # InsecurePlatformWarning's don't vary between requests, so we keep it default. warnings.simplefilter('default', exceptions.InsecurePlatformWarning, append=True) # SNIMissingWarnings should go off only once. 
-warnings.simplefilter('default', exceptions.SNIMissingWarning) +warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True) def disable_warnings(category=exceptions.HTTPWarning): diff --git a/lib/requests/packages/urllib3/_collections.py b/lib/requests/packages/urllib3/_collections.py index 67f3ce994d..77cee01704 100644 --- a/lib/requests/packages/urllib3/_collections.py +++ b/lib/requests/packages/urllib3/_collections.py @@ -134,7 +134,7 @@ class HTTPHeaderDict(MutableMapping): def __init__(self, headers=None, **kwargs): super(HTTPHeaderDict, self).__init__() - self._container = {} + self._container = OrderedDict() if headers is not None: if isinstance(headers, HTTPHeaderDict): self._copy_from(headers) diff --git a/lib/requests/packages/urllib3/connection.py b/lib/requests/packages/urllib3/connection.py index 1e4cd41758..5ce0080480 100644 --- a/lib/requests/packages/urllib3/connection.py +++ b/lib/requests/packages/urllib3/connection.py @@ -1,5 +1,6 @@ from __future__ import absolute_import import datetime +import logging import os import sys import socket @@ -38,7 +39,7 @@ class ConnectionError(Exception): SubjectAltNameWarning, SystemTimeWarning, ) -from .packages.ssl_match_hostname import match_hostname +from .packages.ssl_match_hostname import match_hostname, CertificateError from .util.ssl_ import ( resolve_cert_reqs, @@ -50,6 +51,10 @@ class ConnectionError(Exception): from .util import connection +from ._collections import HTTPHeaderDict + +log = logging.getLogger(__name__) + port_by_scheme = { 'http': 80, 'https': 443, @@ -162,6 +167,38 @@ def connect(self): conn = self._new_conn() self._prepare_conn(conn) + def request_chunked(self, method, url, body=None, headers=None): + """ + Alternative to the common request method, which sends the + body with chunked encoding and not as one block + """ + headers = HTTPHeaderDict(headers if headers is not None else {}) + skip_accept_encoding = 'accept-encoding' in headers + self.putrequest(method, url, skip_accept_encoding=skip_accept_encoding) + for header, value in headers.items(): + self.putheader(header, value) + if 'transfer-encoding' not in headers: + self.putheader('Transfer-Encoding', 'chunked') + self.endheaders() + + if body is not None: + stringish_types = six.string_types + (six.binary_type,) + if isinstance(body, stringish_types): + body = (body,) + for chunk in body: + if not chunk: + continue + if not isinstance(chunk, six.binary_type): + chunk = chunk.encode('utf8') + len_str = hex(len(chunk))[2:] + self.send(len_str.encode('utf-8')) + self.send(b'\r\n') + self.send(chunk) + self.send(b'\r\n') + + # After the if clause, to always have a closed body + self.send(b'0\r\n\r\n') + class HTTPSConnection(HTTPConnection): default_port = port_by_scheme['https'] @@ -265,21 +302,26 @@ def connect(self): 'for details.)'.format(hostname)), SubjectAltNameWarning ) - - # In case the hostname is an IPv6 address, strip the square - # brackets from it before using it to validate. This is because - # a certificate with an IPv6 address in it won't have square - # brackets around that address. Sadly, match_hostname won't do this - # for us: it expects the plain host part without any extra work - # that might have been done to make it palatable to httplib. 
- asserted_hostname = self.assert_hostname or hostname - asserted_hostname = asserted_hostname.strip('[]') - match_hostname(cert, asserted_hostname) + _match_hostname(cert, self.assert_hostname or hostname) self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or self.assert_fingerprint is not None) +def _match_hostname(cert, asserted_hostname): + try: + match_hostname(cert, asserted_hostname) + except CertificateError as e: + log.error( + 'Certificate did not match expected hostname: %s. ' + 'Certificate: %s', asserted_hostname, cert + ) + # Add cert to exception and reraise so client code can inspect + # the cert when catching the exception, if they want to + e._peer_cert = cert + raise + + if ssl: # Make a copy for testing. UnverifiedHTTPSConnection = HTTPSConnection diff --git a/lib/requests/packages/urllib3/connectionpool.py b/lib/requests/packages/urllib3/connectionpool.py index 995b4167b5..3fcfb12012 100644 --- a/lib/requests/packages/urllib3/connectionpool.py +++ b/lib/requests/packages/urllib3/connectionpool.py @@ -69,7 +69,13 @@ def __init__(self, host, port=None): if not host: raise LocationValueError("No host specified.") - self.host = host + # httplib doesn't like it when we include brackets in ipv6 addresses + # Specifically, if we include brackets but also pass the port then + # httplib crazily doubles up the square brackets on the Host header. + # Instead, we need to make sure we never pass ``None`` as the port. + # However, for backward compatibility reasons we can't actually + # *assert* that. + self.host = host.strip('[]') self.port = port def __str__(self): @@ -203,8 +209,8 @@ def _new_conn(self): Return a fresh :class:`HTTPConnection`. """ self.num_connections += 1 - log.info("Starting new HTTP connection (%d): %s" % - (self.num_connections, self.host)) + log.info("Starting new HTTP connection (%d): %s", + self.num_connections, self.host) conn = self.ConnectionCls(host=self.host, port=self.port, timeout=self.timeout.connect_timeout, @@ -239,7 +245,7 @@ def _get_conn(self, timeout=None): # If this is a persistent connection, check if it got disconnected if conn and is_connection_dropped(conn): - log.info("Resetting dropped connection: %s" % self.host) + log.info("Resetting dropped connection: %s", self.host) conn.close() if getattr(conn, 'auto_open', 1) == 0: # This is a proxied connection that has been mutated by @@ -272,7 +278,7 @@ def _put_conn(self, conn): except Full: # This should never happen if self.block == True log.warning( - "Connection pool is full, discarding connection: %s" % + "Connection pool is full, discarding connection: %s", self.host) # Connection never got put back into the pool, close it. @@ -318,7 +324,7 @@ def _raise_timeout(self, err, url, timeout_value): if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6 raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) - def _make_request(self, conn, method, url, timeout=_Default, + def _make_request(self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw): """ Perform a request on a given urllib connection object taken from our @@ -350,7 +356,10 @@ def _make_request(self, conn, method, url, timeout=_Default, # conn.request() calls httplib.*.request, not the method in # urllib3.request. It also calls makefile (recv) on the socket. 
- conn.request(method, url, **httplib_request_kw) + if chunked: + conn.request_chunked(method, url, **httplib_request_kw) + else: + conn.request(method, url, **httplib_request_kw) # Reset the timeout for the recv() on the socket read_timeout = timeout_obj.read_timeout @@ -382,9 +391,8 @@ def _make_request(self, conn, method, url, timeout=_Default, # AppEngine doesn't have a version attr. http_version = getattr(conn, '_http_vsn_str', 'HTTP/?') - log.debug("\"%s %s %s\" %s %s" % (method, url, http_version, - httplib_response.status, - httplib_response.length)) + log.debug("\"%s %s %s\" %s %s", method, url, http_version, + httplib_response.status, httplib_response.length) try: assert_header_parsing(httplib_response.msg) @@ -435,7 +443,8 @@ def is_same_host(self, url): def urlopen(self, method, url, body=None, headers=None, retries=None, redirect=True, assert_same_host=True, timeout=_Default, - pool_timeout=None, release_conn=None, **response_kw): + pool_timeout=None, release_conn=None, chunked=False, + **response_kw): """ Get a connection from the pool and perform an HTTP request. This is the lowest level call for making a request, so you'll need to specify all @@ -512,6 +521,11 @@ def urlopen(self, method, url, body=None, headers=None, retries=None, back into the pool. If None, it takes the value of ``response_kw.get('preload_content', True)``. + :param chunked: + If True, urllib3 will send the body using chunked transfer + encoding. Otherwise, urllib3 will send the body using the standard + content-length form. Defaults to False. + :param \**response_kw: Additional parameters are passed to :meth:`urllib3.response.HTTPResponse.from_httplib` @@ -542,6 +556,10 @@ def urlopen(self, method, url, body=None, headers=None, retries=None, # complains about UnboundLocalError. err = None + # Keep track of whether we cleanly exited the except block. This + # ensures we do proper cleanup in finally. + clean_exit = False + try: # Request a connection from the queue. timeout_obj = self._get_timeout(timeout) @@ -556,13 +574,14 @@ def urlopen(self, method, url, body=None, headers=None, retries=None, # Make the request on the httplib connection object. httplib_response = self._make_request(conn, method, url, timeout=timeout_obj, - body=body, headers=headers) + body=body, headers=headers, + chunked=chunked) # If we're going to release the connection in ``finally:``, then - # the request doesn't need to know about the connection. Otherwise + # the response doesn't need to know about the connection. Otherwise # it will also try to release it and we'll have a double-release # mess. - response_conn = not release_conn and conn + response_conn = conn if not release_conn else None # Import httplib's response into our own wrapper object response = HTTPResponse.from_httplib(httplib_response, @@ -570,10 +589,8 @@ def urlopen(self, method, url, body=None, headers=None, retries=None, connection=response_conn, **response_kw) - # else: - # The connection will be put back into the pool when - # ``response.release_conn()`` is called (implicitly by - # ``response.read()``) + # Everything went great! + clean_exit = True except Empty: # Timed out by queue. @@ -583,22 +600,19 @@ def urlopen(self, method, url, body=None, headers=None, retries=None, # Close the connection. If a connection is reused on which there # was a Certificate error, the next request will certainly raise # another Certificate error. 
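The new request_chunked() helper and the chunked flag threaded through _make_request() and urlopen() above let a body be streamed with chunked transfer encoding. A minimal sketch, assuming the standalone urllib3 1.15+ (the vendored copy lives under requests.packages.urllib3) and an illustrative httpbin URL:

    from urllib3.connectionpool import connection_from_url

    def body_chunks():
        # Each yielded chunk is framed as <hex length>\r\n<data>\r\n by
        # request_chunked(), followed by the terminating 0\r\n\r\n.
        yield b'first chunk'
        yield b'second chunk'

    pool = connection_from_url('http://httpbin.org')
    resp = pool.urlopen('POST', '/post', body=body_chunks(), chunked=True)
    print(resp.status)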
- conn = conn and conn.close() - release_conn = True + clean_exit = False raise SSLError(e) except SSLError: # Treat SSLError separately from BaseSSLError to preserve # traceback. - conn = conn and conn.close() - release_conn = True + clean_exit = False raise except (TimeoutError, HTTPException, SocketError, ProtocolError) as e: # Discard the connection for these exceptions. It will be # be replaced during the next _get_conn() call. - conn = conn and conn.close() - release_conn = True + clean_exit = False if isinstance(e, (SocketError, NewConnectionError)) and self.proxy: e = ProxyError('Cannot connect to proxy.', e) @@ -613,6 +627,14 @@ def urlopen(self, method, url, body=None, headers=None, retries=None, err = e finally: + if not clean_exit: + # We hit some kind of exception, handled or otherwise. We need + # to throw the connection away unless explicitly told not to. + # Close the connection, set the variable to None, and make sure + # we put the None back in the pool to avoid leaking it. + conn = conn and conn.close() + release_conn = True + if release_conn: # Put the connection back to be reused. If the connection is # expired then it will be None, which will get replaced with a @@ -622,7 +644,7 @@ def urlopen(self, method, url, body=None, headers=None, retries=None, if not conn: # Try again log.warning("Retrying (%r) after connection " - "broken by '%r': %s" % (retries, err, url)) + "broken by '%r': %s", retries, err, url) return self.urlopen(method, url, body, headers, retries, redirect, assert_same_host, timeout=timeout, pool_timeout=pool_timeout, @@ -644,7 +666,7 @@ def urlopen(self, method, url, body=None, headers=None, retries=None, raise return response - log.info("Redirecting %s -> %s" % (url, redirect_location)) + log.info("Redirecting %s -> %s", url, redirect_location) return self.urlopen( method, redirect_location, body, headers, retries=retries, redirect=redirect, @@ -654,9 +676,17 @@ def urlopen(self, method, url, body=None, headers=None, retries=None, # Check if we should retry the HTTP response. if retries.is_forced_retry(method, status_code=response.status): - retries = retries.increment(method, url, response=response, _pool=self) + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_status: + # Release the connection for this response, since we're not + # returning it to be released manually. + response.release_conn() + raise + return response retries.sleep() - log.info("Forced retry: %s" % url) + log.info("Forced retry: %s", url) return self.urlopen( method, url, body, headers, retries=retries, redirect=redirect, @@ -742,7 +772,7 @@ def _prepare_proxy(self, conn): except AttributeError: # Platform-specific: Python 2.6 set_tunnel = conn._set_tunnel - if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older + if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older set_tunnel(self.host, self.port) else: set_tunnel(self.host, self.port, self.proxy_headers) @@ -754,8 +784,8 @@ def _new_conn(self): Return a fresh :class:`httplib.HTTPSConnection`. 
""" self.num_connections += 1 - log.info("Starting new HTTPS connection (%d): %s" - % (self.num_connections, self.host)) + log.info("Starting new HTTPS connection (%d): %s", + self.num_connections, self.host) if not self.ConnectionCls or self.ConnectionCls is DummyConnection: raise SSLError("Can't connect to HTTPS URL because the SSL " @@ -812,6 +842,7 @@ def connection_from_url(url, **kw): >>> r = conn.request('GET', '/') """ scheme, host, port = get_host(url) + port = port or port_by_scheme.get(scheme, 80) if scheme == 'https': return HTTPSConnectionPool(host, port=port, **kw) else: diff --git a/lib/requests/packages/urllib3/contrib/appengine.py b/lib/requests/packages/urllib3/contrib/appengine.py index 884cdb220d..f4289c0ff8 100644 --- a/lib/requests/packages/urllib3/contrib/appengine.py +++ b/lib/requests/packages/urllib3/contrib/appengine.py @@ -144,7 +144,7 @@ def urlopen(self, method, url, body=None, headers=None, if retries.is_forced_retry(method, status_code=http_response.status): retries = retries.increment( method, url, response=http_response, _pool=self) - log.info("Forced retry: %s" % url) + log.info("Forced retry: %s", url) retries.sleep() return self.urlopen( method, url, @@ -164,6 +164,14 @@ def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw): if content_encoding == 'deflate': del urlfetch_resp.headers['content-encoding'] + transfer_encoding = urlfetch_resp.headers.get('transfer-encoding') + # We have a full response's content, + # so let's make sure we don't report ourselves as chunked data. + if transfer_encoding == 'chunked': + encodings = transfer_encoding.split(",") + encodings.remove('chunked') + urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings) + return HTTPResponse( # In order for decoding to work, we must present the content as # a file-like object. @@ -177,7 +185,7 @@ def _get_absolute_timeout(self, timeout): if timeout is Timeout.DEFAULT_TIMEOUT: return 5 # 5s is the default timeout for URLFetch. if isinstance(timeout, Timeout): - if timeout.read is not timeout.connect: + if timeout._read is not timeout._connect: warnings.warn( "URLFetch does not support granular timeout settings, " "reverting to total timeout.", AppEnginePlatformWarning) diff --git a/lib/requests/packages/urllib3/contrib/ntlmpool.py b/lib/requests/packages/urllib3/contrib/ntlmpool.py index c136a238db..11d0b5c34d 100644 --- a/lib/requests/packages/urllib3/contrib/ntlmpool.py +++ b/lib/requests/packages/urllib3/contrib/ntlmpool.py @@ -43,8 +43,8 @@ def _new_conn(self): # Performs the NTLM handshake that secures the connection. The socket # must be kept open while requests are performed. self.num_connections += 1 - log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s' % - (self.num_connections, self.host, self.authurl)) + log.debug('Starting NTLM HTTPS connection no. 
%d: https://%s%s', + self.num_connections, self.host, self.authurl) headers = {} headers['Connection'] = 'Keep-Alive' @@ -56,13 +56,13 @@ def _new_conn(self): # Send negotiation message headers[req_header] = ( 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) - log.debug('Request headers: %s' % headers) + log.debug('Request headers: %s', headers) conn.request('GET', self.authurl, None, headers) res = conn.getresponse() reshdr = dict(res.getheaders()) - log.debug('Response status: %s %s' % (res.status, res.reason)) - log.debug('Response headers: %s' % reshdr) - log.debug('Response data: %s [...]' % res.read(100)) + log.debug('Response status: %s %s', res.status, res.reason) + log.debug('Response headers: %s', reshdr) + log.debug('Response data: %s [...]', res.read(100)) # Remove the reference to the socket, so that it can not be closed by # the response object (we want to keep the socket open) @@ -87,12 +87,12 @@ def _new_conn(self): self.pw, NegotiateFlags) headers[req_header] = 'NTLM %s' % auth_msg - log.debug('Request headers: %s' % headers) + log.debug('Request headers: %s', headers) conn.request('GET', self.authurl, None, headers) res = conn.getresponse() - log.debug('Response status: %s %s' % (res.status, res.reason)) - log.debug('Response headers: %s' % dict(res.getheaders())) - log.debug('Response data: %s [...]' % res.read()[:100]) + log.debug('Response status: %s %s', res.status, res.reason) + log.debug('Response headers: %s', dict(res.getheaders())) + log.debug('Response data: %s [...]', res.read()[:100]) if res.status != 200: if res.status == 401: raise Exception('Server rejected request: wrong ' diff --git a/lib/requests/packages/urllib3/contrib/pyopenssl.py b/lib/requests/packages/urllib3/contrib/pyopenssl.py index 5996153afe..ed3b9cc342 100644 --- a/lib/requests/packages/urllib3/contrib/pyopenssl.py +++ b/lib/requests/packages/urllib3/contrib/pyopenssl.py @@ -54,9 +54,17 @@ import OpenSSL.SSL from pyasn1.codec.der import decoder as der_decoder from pyasn1.type import univ, constraint -from socket import _fileobject, timeout, error as SocketError +from socket import timeout, error as SocketError + +try: # Platform-specific: Python 2 + from socket import _fileobject +except ImportError: # Platform-specific: Python 3 + _fileobject = None + from urllib3.packages.backports.makefile import backport_makefile + import ssl import select +import six from .. import connection from .. import util @@ -90,7 +98,7 @@ OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, } -DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS +DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS.encode('ascii') # OpenSSL will only write 16K at a time SSL_WRITE_BLOCKSIZE = 16384 @@ -104,6 +112,7 @@ def inject_into_urllib3(): connection.ssl_wrap_socket = ssl_wrap_socket util.HAS_SNI = HAS_SNI + util.IS_PYOPENSSL = True def extract_from_urllib3(): @@ -111,6 +120,7 @@ def extract_from_urllib3(): connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket util.HAS_SNI = orig_util_HAS_SNI + util.IS_PYOPENSSL = False # Note: This is a slightly bug-fixed version of same from ndg-httpsclient. 
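inject_into_urllib3() and extract_from_urllib3(), shown above, now also toggle util.IS_PYOPENSSL. A hedged sketch of switching the TLS backend, assuming pyOpenSSL, pyasn1 and ndg-httpsclient are installed and an illustrative URL:

    import requests
    from requests.packages.urllib3.contrib import pyopenssl

    pyopenssl.inject_into_urllib3()       # PyOpenSSL-backed ssl_wrap_socket; util.IS_PYOPENSSL = True
    try:
        print(requests.get('https://www.example.org').status_code)
    finally:
        pyopenssl.extract_from_urllib3()  # restore the stdlib implementation; util.IS_PYOPENSSL = False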
@@ -135,7 +145,7 @@ def get_subj_alt_name(peer_cert): for i in range(peer_cert.get_extension_count()): ext = peer_cert.get_extension(i) ext_name = ext.get_short_name() - if ext_name != 'subjectAltName': + if ext_name != b'subjectAltName': continue # PyOpenSSL returns extension data in ASN.1 encoded form @@ -167,13 +177,17 @@ def __init__(self, connection, socket, suppress_ragged_eofs=True): self.socket = socket self.suppress_ragged_eofs = suppress_ragged_eofs self._makefile_refs = 0 + self._closed = False def fileno(self): return self.socket.fileno() - def makefile(self, mode, bufsize=-1): - self._makefile_refs += 1 - return _fileobject(self, mode, bufsize, close=True) + # Copy-pasted from Python 3.5 source code + def _decref_socketios(self): + if self._makefile_refs > 0: + self._makefile_refs -= 1 + if self._closed: + self.close() def recv(self, *args, **kwargs): try: @@ -182,7 +196,7 @@ def recv(self, *args, **kwargs): if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): return b'' else: - raise SocketError(e) + raise SocketError(str(e)) except OpenSSL.SSL.ZeroReturnError as e: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: return b'' @@ -198,6 +212,27 @@ def recv(self, *args, **kwargs): else: return data + def recv_into(self, *args, **kwargs): + try: + return self.connection.recv_into(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): + return 0 + else: + raise SocketError(str(e)) + except OpenSSL.SSL.ZeroReturnError as e: + if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: + return 0 + else: + raise + except OpenSSL.SSL.WantReadError: + rd, wd, ed = select.select( + [self.socket], [], [], self.socket.gettimeout()) + if not rd: + raise timeout('The read operation timed out') + else: + return self.recv_into(*args, **kwargs) + def settimeout(self, timeout): return self.socket.settimeout(timeout) @@ -225,6 +260,7 @@ def shutdown(self): def close(self): if self._makefile_refs < 1: try: + self._closed = True return self.connection.close() except OpenSSL.SSL.Error: return @@ -262,6 +298,16 @@ def _drop(self): self._makefile_refs -= 1 +if _fileobject: # Platform-specific: Python 2 + def makefile(self, mode, bufsize=-1): + self._makefile_refs += 1 + return _fileobject(self, mode, bufsize, close=True) +else: # Platform-specific: Python 3 + makefile = backport_makefile + +WrappedSocket.makefile = makefile + + def _verify_callback(cnx, x509, err_no, err_depth, return_code): return err_no == 0 @@ -285,7 +331,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, else: ctx.set_default_verify_paths() - # Disable TLS compression to migitate CRIME attack (issue #309) + # Disable TLS compression to mitigate CRIME attack (issue #309) OP_NO_COMPRESSION = 0x20000 ctx.set_options(OP_NO_COMPRESSION) @@ -293,6 +339,8 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST) cnx = OpenSSL.SSL.Connection(ctx, sock) + if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3 + server_hostname = server_hostname.encode('utf-8') cnx.set_tlsext_host_name(server_hostname) cnx.set_connect_state() while True: diff --git a/lib/requests/packages/urllib3/exceptions.py b/lib/requests/packages/urllib3/exceptions.py index 8e07eb6198..f2e65917b5 100644 --- a/lib/requests/packages/urllib3/exceptions.py +++ b/lib/requests/packages/urllib3/exceptions.py @@ -180,6 +180,14 @@ class 
SNIMissingWarning(HTTPWarning): pass +class DependencyWarning(HTTPWarning): + """ + Warned when an attempt is made to import a module with missing optional + dependencies. + """ + pass + + class ResponseNotChunked(ProtocolError, ValueError): "Response needs to be chunked in order to read it as chunks." pass diff --git a/lib/requests/packages/urllib3/fields.py b/lib/requests/packages/urllib3/fields.py index c7d48113bd..8fa2a12767 100644 --- a/lib/requests/packages/urllib3/fields.py +++ b/lib/requests/packages/urllib3/fields.py @@ -36,11 +36,11 @@ def format_header_param(name, value): result = '%s="%s"' % (name, value) try: result.encode('ascii') - except UnicodeEncodeError: + except (UnicodeEncodeError, UnicodeDecodeError): pass else: return result - if not six.PY3: # Python 2: + if not six.PY3 and isinstance(value, six.text_type): # Python 2: value = value.encode('utf-8') value = email.utils.encode_rfc2231(value, 'utf-8') value = '%s*=%s' % (name, value) diff --git a/lib/requests/packages/urllib3/poolmanager.py b/lib/requests/packages/urllib3/poolmanager.py index f13e673d1f..1023dcba38 100644 --- a/lib/requests/packages/urllib3/poolmanager.py +++ b/lib/requests/packages/urllib3/poolmanager.py @@ -18,16 +18,16 @@ __all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] -pool_classes_by_scheme = { - 'http': HTTPConnectionPool, - 'https': HTTPSConnectionPool, -} - log = logging.getLogger(__name__) SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', 'ssl_version', 'ca_cert_dir') +pool_classes_by_scheme = { + 'http': HTTPConnectionPool, + 'https': HTTPSConnectionPool, +} + class PoolManager(RequestMethods): """ @@ -65,6 +65,9 @@ def __init__(self, num_pools=10, headers=None, **connection_pool_kw): self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close()) + # Locally set the pool classes so other PoolManagers can override them. + self.pool_classes_by_scheme = pool_classes_by_scheme + def __enter__(self): return self @@ -81,7 +84,7 @@ def _new_pool(self, scheme, host, port): by :meth:`connection_from_url` and companion methods. It is intended to be overridden for customization. """ - pool_cls = pool_classes_by_scheme[scheme] + pool_cls = self.pool_classes_by_scheme[scheme] kwargs = self.connection_pool_kw if scheme == 'http': kwargs = self.connection_pool_kw.copy() @@ -186,7 +189,7 @@ def urlopen(self, method, url, redirect=True, **kw): kw['retries'] = retries kw['redirect'] = redirect - log.info("Redirecting %s -> %s" % (url, redirect_location)) + log.info("Redirecting %s -> %s", url, redirect_location) return self.urlopen(method, redirect_location, **kw) diff --git a/lib/requests/packages/urllib3/response.py b/lib/requests/packages/urllib3/response.py index 8f2a1b5c29..ac1b2f19e3 100644 --- a/lib/requests/packages/urllib3/response.py +++ b/lib/requests/packages/urllib3/response.py @@ -221,6 +221,8 @@ def _error_catcher(self): On exit, release the connection back to the pool. """ + clean_exit = False + try: try: yield @@ -243,20 +245,27 @@ def _error_catcher(self): # This includes IncompleteRead. raise ProtocolError('Connection broken: %r' % e, e) - except Exception: - # The response may not be closed but we're not going to use it anymore - # so close it now to ensure that the connection is released back to the pool. 
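Because the pool class mapping is now stored on the PoolManager instance ("Locally set the pool classes so other PoolManagers can override them"), a single manager can swap in a custom pool class without patching the module-level default. A sketch under the assumption of the standalone urllib3 package; VerboseHTTPConnectionPool is a made-up name:

    from urllib3.poolmanager import PoolManager, pool_classes_by_scheme
    from urllib3.connectionpool import HTTPConnectionPool

    class VerboseHTTPConnectionPool(HTTPConnectionPool):
        def urlopen(self, method, url, **kw):
            print('pool %s -> %s %s' % (self.host, method, url))
            return super(VerboseHTTPConnectionPool, self).urlopen(method, url, **kw)

    manager = PoolManager()
    # Shadow the instance-level mapping for http only; other managers are untouched.
    manager.pool_classes_by_scheme = dict(pool_classes_by_scheme, http=VerboseHTTPConnectionPool)
    resp = manager.request('GET', 'http://httpbin.org/get')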
- if self._original_response and not self._original_response.isclosed(): - self._original_response.close() - - # Closing the response may not actually be sufficient to close - # everything, so if we have a hold of the connection close that - # too. - if self._connection is not None: - self._connection.close() - - raise + # If no exception is thrown, we should avoid cleaning up + # unnecessarily. + clean_exit = True finally: + # If we didn't terminate cleanly, we need to throw away our + # connection. + if not clean_exit: + # The response may not be closed but we're not going to use it + # anymore so close it now to ensure that the connection is + # released back to the pool. + if self._original_response: + self._original_response.close() + + # Closing the response may not actually be sufficient to close + # everything, so if we have a hold of the connection close that + # too. + if self._connection: + self._connection.close() + + # If we hold the original response but it's closed now, we should + # return the connection back to the pool. if self._original_response and self._original_response.isclosed(): self.release_conn() @@ -387,6 +396,9 @@ def close(self): if not self.closed: self._fp.close() + if self._connection: + self._connection.close() + @property def closed(self): if self._fp is None: diff --git a/lib/requests/packages/urllib3/util/__init__.py b/lib/requests/packages/urllib3/util/__init__.py index c6c6243cf1..4778cf9962 100644 --- a/lib/requests/packages/urllib3/util/__init__.py +++ b/lib/requests/packages/urllib3/util/__init__.py @@ -6,6 +6,7 @@ from .ssl_ import ( SSLContext, HAS_SNI, + IS_PYOPENSSL, assert_fingerprint, resolve_cert_reqs, resolve_ssl_version, @@ -26,6 +27,7 @@ __all__ = ( 'HAS_SNI', + 'IS_PYOPENSSL', 'SSLContext', 'Retry', 'Timeout', diff --git a/lib/requests/packages/urllib3/util/response.py b/lib/requests/packages/urllib3/util/response.py index bc7232720d..0b5c75c13c 100644 --- a/lib/requests/packages/urllib3/util/response.py +++ b/lib/requests/packages/urllib3/util/response.py @@ -61,7 +61,7 @@ def assert_header_parsing(headers): def is_response_to_head(response): """ - Checks, wether a the request of a response has been a HEAD-request. + Checks whether the request of a response has been a HEAD-request. Handles the quirks of AppEngine. :param conn: diff --git a/lib/requests/packages/urllib3/util/retry.py b/lib/requests/packages/urllib3/util/retry.py index 03a01249dd..2d3aa20d0a 100644 --- a/lib/requests/packages/urllib3/util/retry.py +++ b/lib/requests/packages/urllib3/util/retry.py @@ -102,6 +102,11 @@ class Retry(object): :param bool raise_on_redirect: Whether, if the number of redirects is exhausted, to raise a MaxRetryError, or to return a response with a response code in the 3xx range. + + :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: + whether we should raise an exception, or return a response, + if status falls in ``status_forcelist`` range and retries have + been exhausted. 
""" DEFAULT_METHOD_WHITELIST = frozenset([ @@ -112,7 +117,8 @@ class Retry(object): def __init__(self, total=10, connect=None, read=None, redirect=None, method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, - backoff_factor=0, raise_on_redirect=True, _observed_errors=0): + backoff_factor=0, raise_on_redirect=True, raise_on_status=True, + _observed_errors=0): self.total = total self.connect = connect @@ -127,6 +133,7 @@ def __init__(self, total=10, connect=None, read=None, redirect=None, self.method_whitelist = method_whitelist self.backoff_factor = backoff_factor self.raise_on_redirect = raise_on_redirect + self.raise_on_status = raise_on_status self._observed_errors = _observed_errors # TODO: use .history instead? def new(self, **kw): @@ -137,6 +144,7 @@ def new(self, **kw): status_forcelist=self.status_forcelist, backoff_factor=self.backoff_factor, raise_on_redirect=self.raise_on_redirect, + raise_on_status=self.raise_on_status, _observed_errors=self._observed_errors, ) params.update(kw) @@ -153,7 +161,7 @@ def from_int(cls, retries, redirect=True, default=None): redirect = bool(redirect) and None new_retries = cls(retries, redirect=redirect) - log.debug("Converted retries value: %r -> %r" % (retries, new_retries)) + log.debug("Converted retries value: %r -> %r", retries, new_retries) return new_retries def get_backoff_time(self): @@ -272,7 +280,7 @@ def increment(self, method=None, url=None, response=None, error=None, if new_retry.is_exhausted(): raise MaxRetryError(_pool, url, error or ResponseError(cause)) - log.debug("Incremented Retry for (url='%s'): %r" % (url, new_retry)) + log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) return new_retry diff --git a/lib/requests/packages/urllib3/util/ssl_.py b/lib/requests/packages/urllib3/util/ssl_.py index 67f83441e2..e8d9e7d292 100644 --- a/lib/requests/packages/urllib3/util/ssl_.py +++ b/lib/requests/packages/urllib3/util/ssl_.py @@ -12,6 +12,7 @@ SSLContext = None HAS_SNI = False create_default_context = None +IS_PYOPENSSL = False # Maps the length of a digest to a possible hash function producing this digest HASHFUNC_MAP = { @@ -110,11 +111,12 @@ def set_ciphers(self, cipher_suite): ) self.ciphers = cipher_suite - def wrap_socket(self, socket, server_hostname=None): + def wrap_socket(self, socket, server_hostname=None, server_side=False): warnings.warn( 'A true SSLContext object is not available. This prevents ' 'urllib3 from configuring SSL appropriately and may cause ' - 'certain SSL connections to fail. For more information, see ' + 'certain SSL connections to fail. You can upgrade to a newer ' + 'version of Python to solve this. For more information, see ' 'https://urllib3.readthedocs.org/en/latest/security.html' '#insecureplatformwarning.', InsecurePlatformWarning @@ -125,6 +127,7 @@ def wrap_socket(self, socket, server_hostname=None): 'ca_certs': self.ca_certs, 'cert_reqs': self.verify_mode, 'ssl_version': self.protocol, + 'server_side': server_side, } if self.supports_set_ciphers: # Platform-specific: Python 2.7+ return wrap_socket(socket, ciphers=self.ciphers, **kwargs) @@ -308,8 +311,8 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, 'An HTTPS request has been made, but the SNI (Subject Name ' 'Indication) extension to TLS is not available on this platform. ' 'This may cause the server to present an incorrect TLS ' - 'certificate, which can cause validation failures. For more ' - 'information, see ' + 'certificate, which can cause validation failures. 
You can upgrade to ' + 'a newer version of Python to solve this. For more information, see ' 'https://urllib3.readthedocs.org/en/latest/security.html' '#snimissingwarning.', SNIMissingWarning diff --git a/lib/requests/sessions.py b/lib/requests/sessions.py index 639668f22b..45be9733e5 100644 --- a/lib/requests/sessions.py +++ b/lib/requests/sessions.py @@ -116,7 +116,6 @@ def resolve_redirects(self, resp, req, stream=False, timeout=None, resp.close() url = resp.headers['location'] - method = req.method # Handle redirection without scheme (see: RFC 1808 Section 4) if url.startswith('//'): @@ -140,22 +139,7 @@ def resolve_redirects(self, resp, req, stream=False, timeout=None, if resp.is_permanent_redirect and req.url != prepared_request.url: self.redirect_cache[req.url] = prepared_request.url - # http://tools.ietf.org/html/rfc7231#section-6.4.4 - if (resp.status_code == codes.see_other and - method != 'HEAD'): - method = 'GET' - - # Do what the browsers do, despite standards... - # First, turn 302s into GETs. - if resp.status_code == codes.found and method != 'HEAD': - method = 'GET' - - # Second, if a POST is responded to with a 301, turn it into a GET. - # This bizarre behaviour is explained in Issue 1704. - if resp.status_code == codes.moved and method == 'POST': - method = 'GET' - - prepared_request.method = method + self.rebuild_method(prepared_request, resp) # https://github.com/kennethreitz/requests/issues/1084 if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): @@ -262,6 +246,28 @@ def rebuild_proxies(self, prepared_request, proxies): return new_proxies + def rebuild_method(self, prepared_request, response): + """When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = prepared_request.method + + # http://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.see_other and method != 'HEAD': + method = 'GET' + + # Do what the browsers do, despite standards... + # First, turn 302s into GETs. + if response.status_code == codes.found and method != 'HEAD': + method = 'GET' + + # Second, if a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in Issue 1704. + if response.status_code == codes.moved and method == 'POST': + method = 'GET' + + prepared_request.method = method + class Session(SessionRedirectMixin): """A Requests session. @@ -437,7 +443,8 @@ def request(self, method, url, A CA_BUNDLE path can also be provided. Defaults to ``True``. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. - """ + :rtype: requests.Response + """ # Create the Request. req = Request( method = method.upper(), @@ -550,7 +557,7 @@ def send(self, request, **kwargs): # It's possible that users might accidentally send a Request object. # Guard against that specific failure case. 
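Extracting rebuild_method() in the sessions.py hunk above also makes the browser-style method downgrading easy to exercise in isolation, without any network traffic. A small sketch, using an in-memory Response as a stand-in for a real 302 answer:

    import requests

    session = requests.Session()
    prepared = requests.Request('POST', 'http://example.com/form').prepare()

    redirect = requests.models.Response()
    redirect.status_code = requests.codes.found   # 302

    session.rebuild_method(prepared, redirect)
    print(prepared.method)                        # GET, as browsers do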
- if not isinstance(request, PreparedRequest): + if isinstance(request, Request): raise ValueError('You can only send PreparedRequests.') # Set up variables needed for resolve_redirects and dispatching of hooks diff --git a/lib/requests/status_codes.py b/lib/requests/status_codes.py index a852574a45..0137c91d96 100644 --- a/lib/requests/status_codes.py +++ b/lib/requests/status_codes.py @@ -53,6 +53,7 @@ 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), 417: ('expectation_failed',), 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), + 421: ('misdirected_request',), 422: ('unprocessable_entity', 'unprocessable'), 423: ('locked',), 424: ('failed_dependency', 'dependency'), diff --git a/lib/requests/structures.py b/lib/requests/structures.py index 3e5f2faa2e..991056e476 100644 --- a/lib/requests/structures.py +++ b/lib/requests/structures.py @@ -10,6 +10,8 @@ import collections +from .compat import OrderedDict + class CaseInsensitiveDict(collections.MutableMapping): """ @@ -40,7 +42,7 @@ class CaseInsensitiveDict(collections.MutableMapping): """ def __init__(self, data=None, **kwargs): - self._store = dict() + self._store = OrderedDict() if data is None: data = {} self.update(data, **kwargs) diff --git a/lib/requests/utils.py b/lib/requests/utils.py index 16f7b98f16..c08448ccb1 100644 --- a/lib/requests/utils.py +++ b/lib/requests/utils.py @@ -14,9 +14,7 @@ import collections import io import os -import platform import re -import sys import socket import struct import warnings @@ -564,6 +562,7 @@ def should_bypass_proxies(url): return False + def get_environ_proxies(url): """Return a dict of environment proxies.""" if should_bypass_proxies(url): @@ -571,6 +570,7 @@ def get_environ_proxies(url): else: return getproxies() + def select_proxy(url, proxies): """Select a proxy for the url, if applicable. @@ -579,11 +579,15 @@ def select_proxy(url, proxies): """ proxies = proxies or {} urlparts = urlparse(url) - proxy = proxies.get(urlparts.scheme+'://'+urlparts.hostname) + if urlparts.hostname is None: + proxy = None + else: + proxy = proxies.get(urlparts.scheme+'://'+urlparts.hostname) if proxy is None: proxy = proxies.get(urlparts.scheme) return proxy + def default_user_agent(name="python-requests"): """Return a string representing the default user agent.""" return '%s/%s' % (name, __version__) @@ -607,21 +611,19 @@ def parse_header_links(value): links = [] - replace_chars = " '\"" + replace_chars = ' \'"' - for val in re.split(", *<", value): + for val in re.split(', *<', value): try: - url, params = val.split(";", 1) + url, params = val.split(';', 1) except ValueError: url, params = val, '' - link = {} - - link["url"] = url.strip("<> '\"") + link = {'url': url.strip('<> \'"')} - for param in params.split(";"): + for param in params.split(';'): try: - key, value = param.split("=") + key, value = param.split('=') except ValueError: break @@ -668,8 +670,8 @@ def guess_json_utf(data): def prepend_scheme_if_needed(url, new_scheme): - '''Given a URL that may or may not have a scheme, prepend the given scheme. - Does not replace a present scheme with the one provided as an argument.''' + """Given a URL that may or may not have a scheme, prepend the given scheme. 
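Switching the CaseInsensitiveDict store to an OrderedDict (structures.py above) means header insertion order is now preserved while lookups stay case-insensitive. A quick sketch:

    from requests.structures import CaseInsensitiveDict

    headers = CaseInsensitiveDict()
    headers['Accept'] = 'application/json'
    headers['User-Agent'] = 'demo/1.0'
    headers['accept'] = 'text/html'        # case-insensitive overwrite keeps its slot

    print(list(headers.keys()))            # ['accept', 'User-Agent']
    print(headers['ACCEPT'])               # text/html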
+ Does not replace a present scheme with the one provided as an argument.""" scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) # urlparse is a finicky beast, and sometimes decides that there isn't a @@ -700,8 +702,6 @@ def to_native_string(string, encoding='ascii'): string in the native string type, encoding and decoding where necessary. This assumes ASCII unless told otherwise. """ - out = None - if isinstance(string, builtin_str): out = string else: diff --git a/lib/singledispatch.py b/lib/singledispatch.py new file mode 100644 index 0000000000..87603fd055 --- /dev/null +++ b/lib/singledispatch.py @@ -0,0 +1,219 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +__all__ = ['singledispatch'] + +from functools import update_wrapper +from weakref import WeakKeyDictionary +from singledispatch_helpers import MappingProxyType, get_cache_token + +################################################################################ +### singledispatch() - single-dispatch generic function decorator +################################################################################ + +def _c3_merge(sequences): + """Merges MROs in *sequences* to a single MRO using the C3 algorithm. + + Adapted from http://www.python.org/download/releases/2.3/mro/. + + """ + result = [] + while True: + sequences = [s for s in sequences if s] # purge empty sequences + if not sequences: + return result + for s1 in sequences: # find merge candidates among seq heads + candidate = s1[0] + for s2 in sequences: + if candidate in s2[1:]: + candidate = None + break # reject the current head, it appears later + else: + break + if not candidate: + raise RuntimeError("Inconsistent hierarchy") + result.append(candidate) + # remove the chosen candidate + for seq in sequences: + if seq[0] == candidate: + del seq[0] + +def _c3_mro(cls, abcs=None): + """Computes the method resolution order using extended C3 linearization. + + If no *abcs* are given, the algorithm works exactly like the built-in C3 + linearization used for method resolution. + + If given, *abcs* is a list of abstract base classes that should be inserted + into the resulting MRO. Unrelated ABCs are ignored and don't end up in the + result. The algorithm inserts ABCs where their functionality is introduced, + i.e. issubclass(cls, abc) returns True for the class itself but returns + False for all its direct base classes. Implicit ABCs for a given class + (either registered or inferred from the presence of a special method like + __len__) are inserted directly after the last ABC explicitly listed in the + MRO of said class. If two implicit ABCs end up next to each other in the + resulting MRO, their ordering depends on the order of types in *abcs*. + + """ + for i, base in enumerate(reversed(cls.__bases__)): + if hasattr(base, '__abstractmethods__'): + boundary = len(cls.__bases__) - i + break # Bases up to the last explicit ABC are considered first. + else: + boundary = 0 + abcs = list(abcs) if abcs else [] + explicit_bases = list(cls.__bases__[:boundary]) + abstract_bases = [] + other_bases = list(cls.__bases__[boundary:]) + for base in abcs: + if issubclass(cls, base) and not any( + issubclass(b, base) for b in cls.__bases__ + ): + # If *cls* is the class that introduces behaviour described by + # an ABC *base*, insert said ABC to its MRO. 
+ abstract_bases.append(base) + for base in abstract_bases: + abcs.remove(base) + explicit_c3_mros = [_c3_mro(base, abcs=abcs) for base in explicit_bases] + abstract_c3_mros = [_c3_mro(base, abcs=abcs) for base in abstract_bases] + other_c3_mros = [_c3_mro(base, abcs=abcs) for base in other_bases] + return _c3_merge( + [[cls]] + + explicit_c3_mros + abstract_c3_mros + other_c3_mros + + [explicit_bases] + [abstract_bases] + [other_bases] + ) + +def _compose_mro(cls, types): + """Calculates the method resolution order for a given class *cls*. + + Includes relevant abstract base classes (with their respective bases) from + the *types* iterable. Uses a modified C3 linearization algorithm. + + """ + bases = set(cls.__mro__) + # Remove entries which are already present in the __mro__ or unrelated. + def is_related(typ): + return (typ not in bases and hasattr(typ, '__mro__') + and issubclass(cls, typ)) + types = [n for n in types if is_related(n)] + # Remove entries which are strict bases of other entries (they will end up + # in the MRO anyway. + def is_strict_base(typ): + for other in types: + if typ != other and typ in other.__mro__: + return True + return False + types = [n for n in types if not is_strict_base(n)] + # Subclasses of the ABCs in *types* which are also implemented by + # *cls* can be used to stabilize ABC ordering. + type_set = set(types) + mro = [] + for typ in types: + found = [] + for sub in typ.__subclasses__(): + if sub not in bases and issubclass(cls, sub): + found.append([s for s in sub.__mro__ if s in type_set]) + if not found: + mro.append(typ) + continue + # Favor subclasses with the biggest number of useful bases + found.sort(key=len, reverse=True) + for sub in found: + for subcls in sub: + if subcls not in mro: + mro.append(subcls) + return _c3_mro(cls, abcs=mro) + +def _find_impl(cls, registry): + """Returns the best matching implementation from *registry* for type *cls*. + + Where there is no registered implementation for a specific type, its method + resolution order is used to find a more generic implementation. + + Note: if *registry* does not contain an implementation for the base + *object* type, this function may return None. + + """ + mro = _compose_mro(cls, registry.keys()) + match = None + for t in mro: + if match is not None: + # If *match* is an implicit ABC but there is another unrelated, + # equally matching implicit ABC, refuse the temptation to guess. + if (t in registry and t not in cls.__mro__ + and match not in cls.__mro__ + and not issubclass(match, t)): + raise RuntimeError("Ambiguous dispatch: {0} or {1}".format( + match, t)) + break + if t in registry: + match = t + return registry.get(match) + +def singledispatch(func): + """Single-dispatch generic function decorator. + + Transforms a function into a generic function, which can have different + behaviours depending upon the type of its first argument. The decorated + function acts as the default implementation, and additional + implementations can be registered using the register() attribute of the + generic function. + + """ + registry = {} + dispatch_cache = WeakKeyDictionary() + def ns(): pass + ns.cache_token = None + + def dispatch(cls): + """generic_func.dispatch(cls) -> + + Runs the dispatch algorithm to return the best available implementation + for the given *cls* registered on *generic_func*. 
+ + """ + if ns.cache_token is not None: + current_token = get_cache_token() + if ns.cache_token != current_token: + dispatch_cache.clear() + ns.cache_token = current_token + try: + impl = dispatch_cache[cls] + except KeyError: + try: + impl = registry[cls] + except KeyError: + impl = _find_impl(cls, registry) + dispatch_cache[cls] = impl + return impl + + def register(cls, func=None): + """generic_func.register(cls, func) -> func + + Registers a new implementation for the given *cls* on a *generic_func*. + + """ + if func is None: + return lambda f: register(cls, f) + registry[cls] = func + if ns.cache_token is None and hasattr(cls, '__abstractmethods__'): + ns.cache_token = get_cache_token() + dispatch_cache.clear() + return func + + def wrapper(*args, **kw): + return dispatch(args[0].__class__)(*args, **kw) + + registry[object] = func + wrapper.register = register + wrapper.dispatch = dispatch + wrapper.registry = MappingProxyType(registry) + wrapper._clear_cache = dispatch_cache.clear + update_wrapper(wrapper, func) + return wrapper + diff --git a/lib/singledispatch_helpers.py b/lib/singledispatch_helpers.py new file mode 100644 index 0000000000..8fcdce4046 --- /dev/null +++ b/lib/singledispatch_helpers.py @@ -0,0 +1,170 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from abc import ABCMeta +from collections import MutableMapping +import sys +try: + from collections import UserDict +except ImportError: + from UserDict import UserDict +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict +try: + from thread import get_ident +except ImportError: + try: + from _thread import get_ident + except ImportError: + from _dummy_thread import get_ident + + +def recursive_repr(fillvalue='...'): + 'Decorator to make a repr function return fillvalue for a recursive call' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + return wrapper + + return decorating_function + + +class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. 
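The newly vendored singledispatch module above is the functools.singledispatch backport: the decorated function is the default implementation, and per-type overloads are attached through its register() attribute. A short usage sketch (the describe() function is invented for illustration):

    from singledispatch import singledispatch

    @singledispatch
    def describe(value):
        return 'something else: %r' % (value,)

    @describe.register(int)
    def _(value):
        return 'an int: %d' % value

    @describe.register(list)
    def _(value):
        return 'a list of %d items' % len(value)

    print(describe(3))         # an int: 3
    print(describe([1, 2]))    # a list of 2 items
    print(describe('x'))       # something else: 'x'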
+ + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + @recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self): # like Django's Context.push() + 'New ChainMap with a new dict followed by all previous maps.' + return self.__class__({}, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' 
+ self.maps[0].clear() + + +class MappingProxyType(UserDict): + def __init__(self, data): + UserDict.__init__(self) + self.data = data + + +def get_cache_token(): + return ABCMeta._abc_invalidation_counter + + + +class Support(object): + def dummy(self): + pass + + def cpython_only(self, func): + if 'PyPy' in sys.version: + return self.dummy + return func diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 67155e0f2b..89033a3fb5 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -1,5 +1,6 @@ # sqlalchemy/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -14,6 +15,7 @@ case, cast, collate, + column, delete, desc, distinct, @@ -23,6 +25,7 @@ extract, false, func, + funcfilter, insert, intersect, intersect_all, @@ -38,6 +41,7 @@ over, select, subquery, + table, text, true, tuple_, @@ -116,7 +120,8 @@ from .inspection import inspect from .engine import create_engine, engine_from_config -__version__ = '0.9.4' +__version__ = '1.0.13' + def __go(lcls): global __all__ @@ -127,7 +132,7 @@ def __go(lcls): import inspect as _inspect __all__ = sorted(name for name, obj in lcls.items() - if not (name.startswith('_') or _inspect.ismodule(obj))) + if not (name.startswith('_') or _inspect.ismodule(obj))) _sa_util.dependencies.resolve_all("sqlalchemy") -__go(locals()) \ No newline at end of file +__go(locals()) diff --git a/lib/sqlalchemy/cextension/processors.c b/lib/sqlalchemy/cextension/processors.c deleted file mode 100644 index d568177634..0000000000 --- a/lib/sqlalchemy/cextension/processors.c +++ /dev/null @@ -1,706 +0,0 @@ -/* -processors.c -Copyright (C) 2010-2014 the SQLAlchemy authors and contributors -Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com - -This module is part of SQLAlchemy and is released under -the MIT License: http://www.opensource.org/licenses/mit-license.php -*/ - -#include -#include - -#define MODULE_NAME "cprocessors" -#define MODULE_DOC "Module containing C versions of data processing functions." - -#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN) -typedef int Py_ssize_t; -#define PY_SSIZE_T_MAX INT_MAX -#define PY_SSIZE_T_MIN INT_MIN -#endif - -static PyObject * -int_to_boolean(PyObject *self, PyObject *arg) -{ - long l = 0; - PyObject *res; - - if (arg == Py_None) - Py_RETURN_NONE; - - -#if PY_MAJOR_VERSION >= 3 - l = PyLong_AsLong(arg); -#else - l = PyInt_AsLong(arg); -#endif - if (l == 0) { - res = Py_False; - } else if (l == 1) { - res = Py_True; - } else if ((l == -1) && PyErr_Occurred()) { - /* -1 can be either the actual value, or an error flag. 
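Stepping back briefly to singledispatch_helpers.py above: the vendored ChainMap groups several mappings into one view, with lookups falling through the maps in order and writes always landing in the first one. A hedged sketch (the settings dicts are invented):

    from singledispatch_helpers import ChainMap

    defaults = {'theme': 'light', 'lang': 'en'}
    overrides = {'lang': 'fr'}

    settings = ChainMap(overrides, defaults)
    print(settings['lang'])     # fr    (first mapping wins)
    print(settings['theme'])    # light (falls through to defaults)

    settings['theme'] = 'dark'  # writes go to the first mapping only
    print(sorted(overrides.items()))   # [('lang', 'fr'), ('theme', 'dark')]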
*/ - return NULL; - } else { - PyErr_SetString(PyExc_ValueError, - "int_to_boolean only accepts None, 0 or 1"); - return NULL; - } - - Py_INCREF(res); - return res; -} - -static PyObject * -to_str(PyObject *self, PyObject *arg) -{ - if (arg == Py_None) - Py_RETURN_NONE; - - return PyObject_Str(arg); -} - -static PyObject * -to_float(PyObject *self, PyObject *arg) -{ - if (arg == Py_None) - Py_RETURN_NONE; - - return PyNumber_Float(arg); -} - -static PyObject * -str_to_datetime(PyObject *self, PyObject *arg) -{ -#if PY_MAJOR_VERSION >= 3 - PyObject *bytes; - PyObject *err_bytes; -#endif - const char *str; - int numparsed; - unsigned int year, month, day, hour, minute, second, microsecond = 0; - PyObject *err_repr; - - if (arg == Py_None) - Py_RETURN_NONE; - -#if PY_MAJOR_VERSION >= 3 - bytes = PyUnicode_AsASCIIString(arg); - if (bytes == NULL) - str = NULL; - else - str = PyBytes_AS_STRING(bytes); -#else - str = PyString_AsString(arg); -#endif - if (str == NULL) { - err_repr = PyObject_Repr(arg); - if (err_repr == NULL) - return NULL; -#if PY_MAJOR_VERSION >= 3 - err_bytes = PyUnicode_AsASCIIString(err_repr); - if (err_bytes == NULL) - return NULL; - PyErr_Format( - PyExc_ValueError, - "Couldn't parse datetime string '%.200s' " - "- value is not a string.", - PyBytes_AS_STRING(err_bytes)); - Py_DECREF(err_bytes); -#else - PyErr_Format( - PyExc_ValueError, - "Couldn't parse datetime string '%.200s' " - "- value is not a string.", - PyString_AsString(err_repr)); -#endif - Py_DECREF(err_repr); - return NULL; - } - - /* microseconds are optional */ - /* - TODO: this is slightly less picky than the Python version which would - not accept "2000-01-01 00:00:00.". I don't know which is better, but they - should be coherent. - */ - numparsed = sscanf(str, "%4u-%2u-%2u %2u:%2u:%2u.%6u", &year, &month, &day, - &hour, &minute, &second, µsecond); -#if PY_MAJOR_VERSION >= 3 - Py_DECREF(bytes); -#endif - if (numparsed < 6) { - err_repr = PyObject_Repr(arg); - if (err_repr == NULL) - return NULL; -#if PY_MAJOR_VERSION >= 3 - err_bytes = PyUnicode_AsASCIIString(err_repr); - if (err_bytes == NULL) - return NULL; - PyErr_Format( - PyExc_ValueError, - "Couldn't parse datetime string: %.200s", - PyBytes_AS_STRING(err_bytes)); - Py_DECREF(err_bytes); -#else - PyErr_Format( - PyExc_ValueError, - "Couldn't parse datetime string: %.200s", - PyString_AsString(err_repr)); -#endif - Py_DECREF(err_repr); - return NULL; - } - return PyDateTime_FromDateAndTime(year, month, day, - hour, minute, second, microsecond); -} - -static PyObject * -str_to_time(PyObject *self, PyObject *arg) -{ -#if PY_MAJOR_VERSION >= 3 - PyObject *bytes; - PyObject *err_bytes; -#endif - const char *str; - int numparsed; - unsigned int hour, minute, second, microsecond = 0; - PyObject *err_repr; - - if (arg == Py_None) - Py_RETURN_NONE; - -#if PY_MAJOR_VERSION >= 3 - bytes = PyUnicode_AsASCIIString(arg); - if (bytes == NULL) - str = NULL; - else - str = PyBytes_AS_STRING(bytes); -#else - str = PyString_AsString(arg); -#endif - if (str == NULL) { - err_repr = PyObject_Repr(arg); - if (err_repr == NULL) - return NULL; - -#if PY_MAJOR_VERSION >= 3 - err_bytes = PyUnicode_AsASCIIString(err_repr); - if (err_bytes == NULL) - return NULL; - PyErr_Format( - PyExc_ValueError, - "Couldn't parse time string '%.200s' - value is not a string.", - PyBytes_AS_STRING(err_bytes)); - Py_DECREF(err_bytes); -#else - PyErr_Format( - PyExc_ValueError, - "Couldn't parse time string '%.200s' - value is not a string.", - PyString_AsString(err_repr)); -#endif - 
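For reference, the str_to_datetime accelerator being deleted above has a straightforward pure-Python counterpart (SQLAlchemy falls back to one when the C extensions are absent). A rough, hedged sketch of the same parsing, with the microseconds part optional just as in the sscanf format:

    import re
    from datetime import datetime

    _DATETIME_RE = re.compile(
        r'(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})(?:\.(\d{1,6}))?')

    def str_to_datetime(value):
        if value is None:
            return None
        match = _DATETIME_RE.match(value)
        if not match:
            raise ValueError("Couldn't parse datetime string: %r" % value)
        year, month, day, hour, minute, second, micro = match.groups()
        return datetime(int(year), int(month), int(day),
                        int(hour), int(minute), int(second), int(micro or 0))

    print(str_to_datetime('2016-06-07 12:58:28.500000'))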
Py_DECREF(err_repr); - return NULL; - } - - /* microseconds are optional */ - /* - TODO: this is slightly less picky than the Python version which would - not accept "00:00:00.". I don't know which is better, but they should be - coherent. - */ - numparsed = sscanf(str, "%2u:%2u:%2u.%6u", &hour, &minute, &second, - µsecond); -#if PY_MAJOR_VERSION >= 3 - Py_DECREF(bytes); -#endif - if (numparsed < 3) { - err_repr = PyObject_Repr(arg); - if (err_repr == NULL) - return NULL; -#if PY_MAJOR_VERSION >= 3 - err_bytes = PyUnicode_AsASCIIString(err_repr); - if (err_bytes == NULL) - return NULL; - PyErr_Format( - PyExc_ValueError, - "Couldn't parse time string: %.200s", - PyBytes_AS_STRING(err_bytes)); - Py_DECREF(err_bytes); -#else - PyErr_Format( - PyExc_ValueError, - "Couldn't parse time string: %.200s", - PyString_AsString(err_repr)); -#endif - Py_DECREF(err_repr); - return NULL; - } - return PyTime_FromTime(hour, minute, second, microsecond); -} - -static PyObject * -str_to_date(PyObject *self, PyObject *arg) -{ -#if PY_MAJOR_VERSION >= 3 - PyObject *bytes; - PyObject *err_bytes; -#endif - const char *str; - int numparsed; - unsigned int year, month, day; - PyObject *err_repr; - - if (arg == Py_None) - Py_RETURN_NONE; - -#if PY_MAJOR_VERSION >= 3 - bytes = PyUnicode_AsASCIIString(arg); - if (bytes == NULL) - str = NULL; - else - str = PyBytes_AS_STRING(bytes); -#else - str = PyString_AsString(arg); -#endif - if (str == NULL) { - err_repr = PyObject_Repr(arg); - if (err_repr == NULL) - return NULL; -#if PY_MAJOR_VERSION >= 3 - err_bytes = PyUnicode_AsASCIIString(err_repr); - if (err_bytes == NULL) - return NULL; - PyErr_Format( - PyExc_ValueError, - "Couldn't parse date string '%.200s' - value is not a string.", - PyBytes_AS_STRING(err_bytes)); - Py_DECREF(err_bytes); -#else - PyErr_Format( - PyExc_ValueError, - "Couldn't parse date string '%.200s' - value is not a string.", - PyString_AsString(err_repr)); -#endif - Py_DECREF(err_repr); - return NULL; - } - - numparsed = sscanf(str, "%4u-%2u-%2u", &year, &month, &day); -#if PY_MAJOR_VERSION >= 3 - Py_DECREF(bytes); -#endif - if (numparsed != 3) { - err_repr = PyObject_Repr(arg); - if (err_repr == NULL) - return NULL; -#if PY_MAJOR_VERSION >= 3 - err_bytes = PyUnicode_AsASCIIString(err_repr); - if (err_bytes == NULL) - return NULL; - PyErr_Format( - PyExc_ValueError, - "Couldn't parse date string: %.200s", - PyBytes_AS_STRING(err_bytes)); - Py_DECREF(err_bytes); -#else - PyErr_Format( - PyExc_ValueError, - "Couldn't parse date string: %.200s", - PyString_AsString(err_repr)); -#endif - Py_DECREF(err_repr); - return NULL; - } - return PyDate_FromDate(year, month, day); -} - - -/*********** - * Structs * - ***********/ - -typedef struct { - PyObject_HEAD - PyObject *encoding; - PyObject *errors; -} UnicodeResultProcessor; - -typedef struct { - PyObject_HEAD - PyObject *type; - PyObject *format; -} DecimalResultProcessor; - - - -/************************** - * UnicodeResultProcessor * - **************************/ - -static int -UnicodeResultProcessor_init(UnicodeResultProcessor *self, PyObject *args, - PyObject *kwds) -{ - PyObject *encoding, *errors = NULL; - static char *kwlist[] = {"encoding", "errors", NULL}; - -#if PY_MAJOR_VERSION >= 3 - if (!PyArg_ParseTupleAndKeywords(args, kwds, "U|U:__init__", kwlist, - &encoding, &errors)) - return -1; -#else - if (!PyArg_ParseTupleAndKeywords(args, kwds, "S|S:__init__", kwlist, - &encoding, &errors)) - return -1; -#endif - -#if PY_MAJOR_VERSION >= 3 - encoding = PyUnicode_AsASCIIString(encoding); -#else 
- Py_INCREF(encoding); -#endif - self->encoding = encoding; - - if (errors) { -#if PY_MAJOR_VERSION >= 3 - errors = PyUnicode_AsASCIIString(errors); -#else - Py_INCREF(errors); -#endif - } else { -#if PY_MAJOR_VERSION >= 3 - errors = PyBytes_FromString("strict"); -#else - errors = PyString_FromString("strict"); -#endif - if (errors == NULL) - return -1; - } - self->errors = errors; - - return 0; -} - -static PyObject * -UnicodeResultProcessor_process(UnicodeResultProcessor *self, PyObject *value) -{ - const char *encoding, *errors; - char *str; - Py_ssize_t len; - - if (value == Py_None) - Py_RETURN_NONE; - -#if PY_MAJOR_VERSION >= 3 - if (PyBytes_AsStringAndSize(value, &str, &len)) - return NULL; - - encoding = PyBytes_AS_STRING(self->encoding); - errors = PyBytes_AS_STRING(self->errors); -#else - if (PyString_AsStringAndSize(value, &str, &len)) - return NULL; - - encoding = PyString_AS_STRING(self->encoding); - errors = PyString_AS_STRING(self->errors); -#endif - - return PyUnicode_Decode(str, len, encoding, errors); -} - -static PyObject * -UnicodeResultProcessor_conditional_process(UnicodeResultProcessor *self, PyObject *value) -{ - const char *encoding, *errors; - char *str; - Py_ssize_t len; - - if (value == Py_None) - Py_RETURN_NONE; - -#if PY_MAJOR_VERSION >= 3 - if (PyUnicode_Check(value) == 1) { - Py_INCREF(value); - return value; - } - - if (PyBytes_AsStringAndSize(value, &str, &len)) - return NULL; - - encoding = PyBytes_AS_STRING(self->encoding); - errors = PyBytes_AS_STRING(self->errors); -#else - - if (PyUnicode_Check(value) == 1) { - Py_INCREF(value); - return value; - } - - if (PyString_AsStringAndSize(value, &str, &len)) - return NULL; - - - encoding = PyString_AS_STRING(self->encoding); - errors = PyString_AS_STRING(self->errors); -#endif - - return PyUnicode_Decode(str, len, encoding, errors); -} - -static void -UnicodeResultProcessor_dealloc(UnicodeResultProcessor *self) -{ - Py_XDECREF(self->encoding); - Py_XDECREF(self->errors); -#if PY_MAJOR_VERSION >= 3 - Py_TYPE(self)->tp_free((PyObject*)self); -#else - self->ob_type->tp_free((PyObject*)self); -#endif -} - -static PyMethodDef UnicodeResultProcessor_methods[] = { - {"process", (PyCFunction)UnicodeResultProcessor_process, METH_O, - "The value processor itself."}, - {"conditional_process", (PyCFunction)UnicodeResultProcessor_conditional_process, METH_O, - "Conditional version of the value processor."}, - {NULL} /* Sentinel */ -}; - -static PyTypeObject UnicodeResultProcessorType = { - PyVarObject_HEAD_INIT(NULL, 0) - "sqlalchemy.cprocessors.UnicodeResultProcessor", /* tp_name */ - sizeof(UnicodeResultProcessor), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)UnicodeResultProcessor_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - "UnicodeResultProcessor objects", /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - UnicodeResultProcessor_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)UnicodeResultProcessor_init, /* tp_init */ - 
0, /* tp_alloc */ - 0, /* tp_new */ -}; - -/************************** - * DecimalResultProcessor * - **************************/ - -static int -DecimalResultProcessor_init(DecimalResultProcessor *self, PyObject *args, - PyObject *kwds) -{ - PyObject *type, *format; - -#if PY_MAJOR_VERSION >= 3 - if (!PyArg_ParseTuple(args, "OU", &type, &format)) -#else - if (!PyArg_ParseTuple(args, "OS", &type, &format)) -#endif - return -1; - - Py_INCREF(type); - self->type = type; - - Py_INCREF(format); - self->format = format; - - return 0; -} - -static PyObject * -DecimalResultProcessor_process(DecimalResultProcessor *self, PyObject *value) -{ - PyObject *str, *result, *args; - - if (value == Py_None) - Py_RETURN_NONE; - - /* Decimal does not accept float values directly */ - /* SQLite can also give us an integer here (see [ticket:2432]) */ - /* XXX: starting with Python 3.1, we could use Decimal.from_float(f), - but the result wouldn't be the same */ - - args = PyTuple_Pack(1, value); - if (args == NULL) - return NULL; - -#if PY_MAJOR_VERSION >= 3 - str = PyUnicode_Format(self->format, args); -#else - str = PyString_Format(self->format, args); -#endif - - Py_DECREF(args); - if (str == NULL) - return NULL; - - result = PyObject_CallFunctionObjArgs(self->type, str, NULL); - Py_DECREF(str); - return result; -} - -static void -DecimalResultProcessor_dealloc(DecimalResultProcessor *self) -{ - Py_XDECREF(self->type); - Py_XDECREF(self->format); -#if PY_MAJOR_VERSION >= 3 - Py_TYPE(self)->tp_free((PyObject*)self); -#else - self->ob_type->tp_free((PyObject*)self); -#endif -} - -static PyMethodDef DecimalResultProcessor_methods[] = { - {"process", (PyCFunction)DecimalResultProcessor_process, METH_O, - "The value processor itself."}, - {NULL} /* Sentinel */ -}; - -static PyTypeObject DecimalResultProcessorType = { - PyVarObject_HEAD_INIT(NULL, 0) - "sqlalchemy.DecimalResultProcessor", /* tp_name */ - sizeof(DecimalResultProcessor), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)DecimalResultProcessor_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - "DecimalResultProcessor objects", /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - DecimalResultProcessor_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)DecimalResultProcessor_init, /* tp_init */ - 0, /* tp_alloc */ - 0, /* tp_new */ -}; - -static PyMethodDef module_methods[] = { - {"int_to_boolean", int_to_boolean, METH_O, - "Convert an integer to a boolean."}, - {"to_str", to_str, METH_O, - "Convert any value to its string representation."}, - {"to_float", to_float, METH_O, - "Convert any value to its floating point representation."}, - {"str_to_datetime", str_to_datetime, METH_O, - "Convert an ISO string to a datetime.datetime object."}, - {"str_to_time", str_to_time, METH_O, - "Convert an ISO string to a datetime.time object."}, - {"str_to_date", str_to_date, METH_O, - "Convert an ISO string to a datetime.date object."}, - {NULL, NULL, 0, NULL} /* Sentinel */ -}; - 
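The two result-processor types above (UnicodeResultProcessor and DecimalResultProcessor) only accelerate very small pure-Python callables: decode byte strings with a configured encoding, or format a number and feed it to Decimal, passing None through untouched. A hedged Python sketch of the same behaviour:

    from decimal import Decimal

    def unicode_processor(encoding='utf-8', errors='strict'):
        def process(value):
            return None if value is None else value.decode(encoding, errors)
        return process

    def decimal_processor(fmt='%.10f'):
        def process(value):
            return None if value is None else Decimal(fmt % value)
        return process

    print(unicode_processor()(b'caf\xc3\xa9'))   # cafe with an accented e
    print(decimal_processor()(3.14))             # 3.1400000000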
-#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ -#define PyMODINIT_FUNC void -#endif - - -#if PY_MAJOR_VERSION >= 3 - -static struct PyModuleDef module_def = { - PyModuleDef_HEAD_INIT, - MODULE_NAME, - MODULE_DOC, - -1, - module_methods -}; - -#define INITERROR return NULL - -PyMODINIT_FUNC -PyInit_cprocessors(void) - -#else - -#define INITERROR return - -PyMODINIT_FUNC -initcprocessors(void) - -#endif - -{ - PyObject *m; - - UnicodeResultProcessorType.tp_new = PyType_GenericNew; - if (PyType_Ready(&UnicodeResultProcessorType) < 0) - INITERROR; - - DecimalResultProcessorType.tp_new = PyType_GenericNew; - if (PyType_Ready(&DecimalResultProcessorType) < 0) - INITERROR; - -#if PY_MAJOR_VERSION >= 3 - m = PyModule_Create(&module_def); -#else - m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC); -#endif - if (m == NULL) - INITERROR; - - PyDateTime_IMPORT; - - Py_INCREF(&UnicodeResultProcessorType); - PyModule_AddObject(m, "UnicodeResultProcessor", - (PyObject *)&UnicodeResultProcessorType); - - Py_INCREF(&DecimalResultProcessorType); - PyModule_AddObject(m, "DecimalResultProcessor", - (PyObject *)&DecimalResultProcessorType); - -#if PY_MAJOR_VERSION >= 3 - return m; -#endif -} diff --git a/lib/sqlalchemy/cextension/resultproxy.c b/lib/sqlalchemy/cextension/resultproxy.c deleted file mode 100644 index 218c7b8076..0000000000 --- a/lib/sqlalchemy/cextension/resultproxy.c +++ /dev/null @@ -1,718 +0,0 @@ -/* -resultproxy.c -Copyright (C) 2010-2014 the SQLAlchemy authors and contributors -Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com - -This module is part of SQLAlchemy and is released under -the MIT License: http://www.opensource.org/licenses/mit-license.php -*/ - -#include - -#define MODULE_NAME "cresultproxy" -#define MODULE_DOC "Module containing C versions of core ResultProxy classes." 
- -#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN) -typedef int Py_ssize_t; -#define PY_SSIZE_T_MAX INT_MAX -#define PY_SSIZE_T_MIN INT_MIN -typedef Py_ssize_t (*lenfunc)(PyObject *); -#define PyInt_FromSsize_t(x) PyInt_FromLong(x) -typedef intargfunc ssizeargfunc; -#endif - - -/*********** - * Structs * - ***********/ - -typedef struct { - PyObject_HEAD - PyObject *parent; - PyObject *row; - PyObject *processors; - PyObject *keymap; -} BaseRowProxy; - -/**************** - * BaseRowProxy * - ****************/ - -static PyObject * -safe_rowproxy_reconstructor(PyObject *self, PyObject *args) -{ - PyObject *cls, *state, *tmp; - BaseRowProxy *obj; - - if (!PyArg_ParseTuple(args, "OO", &cls, &state)) - return NULL; - - obj = (BaseRowProxy *)PyObject_CallMethod(cls, "__new__", "O", cls); - if (obj == NULL) - return NULL; - - tmp = PyObject_CallMethod((PyObject *)obj, "__setstate__", "O", state); - if (tmp == NULL) { - Py_DECREF(obj); - return NULL; - } - Py_DECREF(tmp); - - if (obj->parent == NULL || obj->row == NULL || - obj->processors == NULL || obj->keymap == NULL) { - PyErr_SetString(PyExc_RuntimeError, - "__setstate__ for BaseRowProxy subclasses must set values " - "for parent, row, processors and keymap"); - Py_DECREF(obj); - return NULL; - } - - return (PyObject *)obj; -} - -static int -BaseRowProxy_init(BaseRowProxy *self, PyObject *args, PyObject *kwds) -{ - PyObject *parent, *row, *processors, *keymap; - - if (!PyArg_UnpackTuple(args, "BaseRowProxy", 4, 4, - &parent, &row, &processors, &keymap)) - return -1; - - Py_INCREF(parent); - self->parent = parent; - - if (!PySequence_Check(row)) { - PyErr_SetString(PyExc_TypeError, "row must be a sequence"); - return -1; - } - Py_INCREF(row); - self->row = row; - - if (!PyList_CheckExact(processors)) { - PyErr_SetString(PyExc_TypeError, "processors must be a list"); - return -1; - } - Py_INCREF(processors); - self->processors = processors; - - if (!PyDict_CheckExact(keymap)) { - PyErr_SetString(PyExc_TypeError, "keymap must be a dict"); - return -1; - } - Py_INCREF(keymap); - self->keymap = keymap; - - return 0; -} - -/* We need the reduce method because otherwise the default implementation - * does very weird stuff for pickle protocol 0 and 1. It calls - * BaseRowProxy.__new__(RowProxy_instance) upon *pickling*. 
- */ -static PyObject * -BaseRowProxy_reduce(PyObject *self) -{ - PyObject *method, *state; - PyObject *module, *reconstructor, *cls; - - method = PyObject_GetAttrString(self, "__getstate__"); - if (method == NULL) - return NULL; - - state = PyObject_CallObject(method, NULL); - Py_DECREF(method); - if (state == NULL) - return NULL; - - module = PyImport_ImportModule("sqlalchemy.engine.result"); - if (module == NULL) - return NULL; - - reconstructor = PyObject_GetAttrString(module, "rowproxy_reconstructor"); - Py_DECREF(module); - if (reconstructor == NULL) { - Py_DECREF(state); - return NULL; - } - - cls = PyObject_GetAttrString(self, "__class__"); - if (cls == NULL) { - Py_DECREF(reconstructor); - Py_DECREF(state); - return NULL; - } - - return Py_BuildValue("(N(NN))", reconstructor, cls, state); -} - -static void -BaseRowProxy_dealloc(BaseRowProxy *self) -{ - Py_XDECREF(self->parent); - Py_XDECREF(self->row); - Py_XDECREF(self->processors); - Py_XDECREF(self->keymap); -#if PY_MAJOR_VERSION >= 3 - Py_TYPE(self)->tp_free((PyObject *)self); -#else - self->ob_type->tp_free((PyObject *)self); -#endif -} - -static PyObject * -BaseRowProxy_processvalues(PyObject *values, PyObject *processors, int astuple) -{ - Py_ssize_t num_values, num_processors; - PyObject **valueptr, **funcptr, **resultptr; - PyObject *func, *result, *processed_value, *values_fastseq; - - num_values = PySequence_Length(values); - num_processors = PyList_Size(processors); - if (num_values != num_processors) { - PyErr_Format(PyExc_RuntimeError, - "number of values in row (%d) differ from number of column " - "processors (%d)", - (int)num_values, (int)num_processors); - return NULL; - } - - if (astuple) { - result = PyTuple_New(num_values); - } else { - result = PyList_New(num_values); - } - if (result == NULL) - return NULL; - - values_fastseq = PySequence_Fast(values, "row must be a sequence"); - if (values_fastseq == NULL) - return NULL; - - valueptr = PySequence_Fast_ITEMS(values_fastseq); - funcptr = PySequence_Fast_ITEMS(processors); - resultptr = PySequence_Fast_ITEMS(result); - while (--num_values >= 0) { - func = *funcptr; - if (func != Py_None) { - processed_value = PyObject_CallFunctionObjArgs(func, *valueptr, - NULL); - if (processed_value == NULL) { - Py_DECREF(values_fastseq); - Py_DECREF(result); - return NULL; - } - *resultptr = processed_value; - } else { - Py_INCREF(*valueptr); - *resultptr = *valueptr; - } - valueptr++; - funcptr++; - resultptr++; - } - Py_DECREF(values_fastseq); - return result; -} - -static PyListObject * -BaseRowProxy_values(BaseRowProxy *self) -{ - return (PyListObject *)BaseRowProxy_processvalues(self->row, - self->processors, 0); -} - -static PyObject * -BaseRowProxy_iter(BaseRowProxy *self) -{ - PyObject *values, *result; - - values = BaseRowProxy_processvalues(self->row, self->processors, 1); - if (values == NULL) - return NULL; - - result = PyObject_GetIter(values); - Py_DECREF(values); - if (result == NULL) - return NULL; - - return result; -} - -static Py_ssize_t -BaseRowProxy_length(BaseRowProxy *self) -{ - return PySequence_Length(self->row); -} - -static PyObject * -BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key) -{ - PyObject *processors, *values; - PyObject *processor, *value, *processed_value; - PyObject *row, *record, *result, *indexobject; - PyObject *exc_module, *exception, *cstr_obj; -#if PY_MAJOR_VERSION >= 3 - PyObject *bytes; -#endif - char *cstr_key; - long index; - int key_fallback = 0; - int tuple_check = 0; - -#if PY_MAJOR_VERSION < 3 - if 
(PyInt_CheckExact(key)) { - index = PyInt_AS_LONG(key); - } -#endif - - if (PyLong_CheckExact(key)) { - index = PyLong_AsLong(key); - if ((index == -1) && PyErr_Occurred()) - /* -1 can be either the actual value, or an error flag. */ - return NULL; - } else if (PySlice_Check(key)) { - values = PyObject_GetItem(self->row, key); - if (values == NULL) - return NULL; - - processors = PyObject_GetItem(self->processors, key); - if (processors == NULL) { - Py_DECREF(values); - return NULL; - } - - result = BaseRowProxy_processvalues(values, processors, 1); - Py_DECREF(values); - Py_DECREF(processors); - return result; - } else { - record = PyDict_GetItem((PyObject *)self->keymap, key); - if (record == NULL) { - record = PyObject_CallMethod(self->parent, "_key_fallback", - "O", key); - if (record == NULL) - return NULL; - key_fallback = 1; - } - - indexobject = PyTuple_GetItem(record, 2); - if (indexobject == NULL) - return NULL; - - if (key_fallback) { - Py_DECREF(record); - } - - if (indexobject == Py_None) { - exc_module = PyImport_ImportModule("sqlalchemy.exc"); - if (exc_module == NULL) - return NULL; - - exception = PyObject_GetAttrString(exc_module, - "InvalidRequestError"); - Py_DECREF(exc_module); - if (exception == NULL) - return NULL; - - // wow. this seems quite excessive. - cstr_obj = PyObject_Str(key); - if (cstr_obj == NULL) - return NULL; - -/* - FIXME: raise encoding error exception (in both versions below) - if the key contains non-ascii chars, instead of an - InvalidRequestError without any message like in the - python version. -*/ -#if PY_MAJOR_VERSION >= 3 - bytes = PyUnicode_AsASCIIString(cstr_obj); - if (bytes == NULL) - return NULL; - cstr_key = PyBytes_AS_STRING(bytes); -#else - cstr_key = PyString_AsString(cstr_obj); -#endif - if (cstr_key == NULL) { - Py_DECREF(cstr_obj); - return NULL; - } - Py_DECREF(cstr_obj); - - PyErr_Format(exception, - "Ambiguous column name '%.200s' in result set! " - "try 'use_labels' option on select statement.", cstr_key); - return NULL; - } - -#if PY_MAJOR_VERSION >= 3 - index = PyLong_AsLong(indexobject); -#else - index = PyInt_AsLong(indexobject); -#endif - if ((index == -1) && PyErr_Occurred()) - /* -1 can be either the actual value, or an error flag. 
*/ - return NULL; - } - processor = PyList_GetItem(self->processors, index); - if (processor == NULL) - return NULL; - - row = self->row; - if (PyTuple_CheckExact(row)) { - value = PyTuple_GetItem(row, index); - tuple_check = 1; - } - else { - value = PySequence_GetItem(row, index); - tuple_check = 0; - } - - if (value == NULL) - return NULL; - - if (processor != Py_None) { - processed_value = PyObject_CallFunctionObjArgs(processor, value, NULL); - if (!tuple_check) { - Py_DECREF(value); - } - return processed_value; - } else { - if (tuple_check) { - Py_INCREF(value); - } - return value; - } -} - -static PyObject * -BaseRowProxy_getitem(PyObject *self, Py_ssize_t i) -{ - PyObject *index; - -#if PY_MAJOR_VERSION >= 3 - index = PyLong_FromSsize_t(i); -#else - index = PyInt_FromSsize_t(i); -#endif - return BaseRowProxy_subscript((BaseRowProxy*)self, index); -} - -static PyObject * -BaseRowProxy_getattro(BaseRowProxy *self, PyObject *name) -{ - PyObject *tmp; -#if PY_MAJOR_VERSION >= 3 - PyObject *err_bytes; -#endif - - if (!(tmp = PyObject_GenericGetAttr((PyObject *)self, name))) { - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) - return NULL; - PyErr_Clear(); - } - else - return tmp; - - tmp = BaseRowProxy_subscript(self, name); - if (tmp == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) { - -#if PY_MAJOR_VERSION >= 3 - err_bytes = PyUnicode_AsASCIIString(name); - if (err_bytes == NULL) - return NULL; - PyErr_Format( - PyExc_AttributeError, - "Could not locate column in row for column '%.200s'", - PyBytes_AS_STRING(err_bytes) - ); -#else - PyErr_Format( - PyExc_AttributeError, - "Could not locate column in row for column '%.200s'", - PyString_AsString(name) - ); -#endif - return NULL; - } - return tmp; -} - -/*********************** - * getters and setters * - ***********************/ - -static PyObject * -BaseRowProxy_getparent(BaseRowProxy *self, void *closure) -{ - Py_INCREF(self->parent); - return self->parent; -} - -static int -BaseRowProxy_setparent(BaseRowProxy *self, PyObject *value, void *closure) -{ - PyObject *module, *cls; - - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, - "Cannot delete the 'parent' attribute"); - return -1; - } - - module = PyImport_ImportModule("sqlalchemy.engine.result"); - if (module == NULL) - return -1; - - cls = PyObject_GetAttrString(module, "ResultMetaData"); - Py_DECREF(module); - if (cls == NULL) - return -1; - - if (PyObject_IsInstance(value, cls) != 1) { - PyErr_SetString(PyExc_TypeError, - "The 'parent' attribute value must be an instance of " - "ResultMetaData"); - return -1; - } - Py_DECREF(cls); - Py_XDECREF(self->parent); - Py_INCREF(value); - self->parent = value; - - return 0; -} - -static PyObject * -BaseRowProxy_getrow(BaseRowProxy *self, void *closure) -{ - Py_INCREF(self->row); - return self->row; -} - -static int -BaseRowProxy_setrow(BaseRowProxy *self, PyObject *value, void *closure) -{ - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, - "Cannot delete the 'row' attribute"); - return -1; - } - - if (!PySequence_Check(value)) { - PyErr_SetString(PyExc_TypeError, - "The 'row' attribute value must be a sequence"); - return -1; - } - - Py_XDECREF(self->row); - Py_INCREF(value); - self->row = value; - - return 0; -} - -static PyObject * -BaseRowProxy_getprocessors(BaseRowProxy *self, void *closure) -{ - Py_INCREF(self->processors); - return self->processors; -} - -static int -BaseRowProxy_setprocessors(BaseRowProxy *self, PyObject *value, void *closure) -{ - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, - 
"Cannot delete the 'processors' attribute"); - return -1; - } - - if (!PyList_CheckExact(value)) { - PyErr_SetString(PyExc_TypeError, - "The 'processors' attribute value must be a list"); - return -1; - } - - Py_XDECREF(self->processors); - Py_INCREF(value); - self->processors = value; - - return 0; -} - -static PyObject * -BaseRowProxy_getkeymap(BaseRowProxy *self, void *closure) -{ - Py_INCREF(self->keymap); - return self->keymap; -} - -static int -BaseRowProxy_setkeymap(BaseRowProxy *self, PyObject *value, void *closure) -{ - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, - "Cannot delete the 'keymap' attribute"); - return -1; - } - - if (!PyDict_CheckExact(value)) { - PyErr_SetString(PyExc_TypeError, - "The 'keymap' attribute value must be a dict"); - return -1; - } - - Py_XDECREF(self->keymap); - Py_INCREF(value); - self->keymap = value; - - return 0; -} - -static PyGetSetDef BaseRowProxy_getseters[] = { - {"_parent", - (getter)BaseRowProxy_getparent, (setter)BaseRowProxy_setparent, - "ResultMetaData", - NULL}, - {"_row", - (getter)BaseRowProxy_getrow, (setter)BaseRowProxy_setrow, - "Original row tuple", - NULL}, - {"_processors", - (getter)BaseRowProxy_getprocessors, (setter)BaseRowProxy_setprocessors, - "list of type processors", - NULL}, - {"_keymap", - (getter)BaseRowProxy_getkeymap, (setter)BaseRowProxy_setkeymap, - "Key to (processor, index) dict", - NULL}, - {NULL} -}; - -static PyMethodDef BaseRowProxy_methods[] = { - {"values", (PyCFunction)BaseRowProxy_values, METH_NOARGS, - "Return the values represented by this BaseRowProxy as a list."}, - {"__reduce__", (PyCFunction)BaseRowProxy_reduce, METH_NOARGS, - "Pickle support method."}, - {NULL} /* Sentinel */ -}; - -static PySequenceMethods BaseRowProxy_as_sequence = { - (lenfunc)BaseRowProxy_length, /* sq_length */ - 0, /* sq_concat */ - 0, /* sq_repeat */ - (ssizeargfunc)BaseRowProxy_getitem, /* sq_item */ - 0, /* sq_slice */ - 0, /* sq_ass_item */ - 0, /* sq_ass_slice */ - 0, /* sq_contains */ - 0, /* sq_inplace_concat */ - 0, /* sq_inplace_repeat */ -}; - -static PyMappingMethods BaseRowProxy_as_mapping = { - (lenfunc)BaseRowProxy_length, /* mp_length */ - (binaryfunc)BaseRowProxy_subscript, /* mp_subscript */ - 0 /* mp_ass_subscript */ -}; - -static PyTypeObject BaseRowProxyType = { - PyVarObject_HEAD_INIT(NULL, 0) - "sqlalchemy.cresultproxy.BaseRowProxy", /* tp_name */ - sizeof(BaseRowProxy), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)BaseRowProxy_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - &BaseRowProxy_as_sequence, /* tp_as_sequence */ - &BaseRowProxy_as_mapping, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - (getattrofunc)BaseRowProxy_getattro,/* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - "BaseRowProxy is a abstract base class for RowProxy", /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - (getiterfunc)BaseRowProxy_iter, /* tp_iter */ - 0, /* tp_iternext */ - BaseRowProxy_methods, /* tp_methods */ - 0, /* tp_members */ - BaseRowProxy_getseters, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)BaseRowProxy_init, /* tp_init */ - 0, /* tp_alloc */ - 0 /* tp_new */ -}; - -static PyMethodDef module_methods[] = { - 
{"safe_rowproxy_reconstructor", safe_rowproxy_reconstructor, METH_VARARGS, - "reconstruct a RowProxy instance from its pickled form."}, - {NULL, NULL, 0, NULL} /* Sentinel */ -}; - -#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ -#define PyMODINIT_FUNC void -#endif - - -#if PY_MAJOR_VERSION >= 3 - -static struct PyModuleDef module_def = { - PyModuleDef_HEAD_INIT, - MODULE_NAME, - MODULE_DOC, - -1, - module_methods -}; - -#define INITERROR return NULL - -PyMODINIT_FUNC -PyInit_cresultproxy(void) - -#else - -#define INITERROR return - -PyMODINIT_FUNC -initcresultproxy(void) - -#endif - -{ - PyObject *m; - - BaseRowProxyType.tp_new = PyType_GenericNew; - if (PyType_Ready(&BaseRowProxyType) < 0) - INITERROR; - -#if PY_MAJOR_VERSION >= 3 - m = PyModule_Create(&module_def); -#else - m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC); -#endif - if (m == NULL) - INITERROR; - - Py_INCREF(&BaseRowProxyType); - PyModule_AddObject(m, "BaseRowProxy", (PyObject *)&BaseRowProxyType); - -#if PY_MAJOR_VERSION >= 3 - return m; -#endif -} diff --git a/lib/sqlalchemy/cextension/utils.c b/lib/sqlalchemy/cextension/utils.c deleted file mode 100644 index 377ba8a8d6..0000000000 --- a/lib/sqlalchemy/cextension/utils.c +++ /dev/null @@ -1,225 +0,0 @@ -/* -utils.c -Copyright (C) 2012-2014 the SQLAlchemy authors and contributors - -This module is part of SQLAlchemy and is released under -the MIT License: http://www.opensource.org/licenses/mit-license.php -*/ - -#include - -#define MODULE_NAME "cutils" -#define MODULE_DOC "Module containing C versions of utility functions." - -/* - Given arguments from the calling form *multiparams, **params, - return a list of bind parameter structures, usually a list of - dictionaries. - - In the case of 'raw' execution which accepts positional parameters, - it may be a list of tuples or lists. 
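Dropping resultproxy.c above does not change the row interface: SQLAlchemy's pure-Python RowProxy provides the same access by position, by column name, and by attribute that the C BaseRowProxy accelerated. A hedged sketch against an in-memory SQLite engine:

    from sqlalchemy import create_engine

    engine = create_engine('sqlite://')
    row = engine.execute("SELECT 1 AS id, 'abc' AS name").fetchone()

    print(row[0], row['name'], row.name)   # 1 abc abc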
- - */ -static PyObject * -distill_params(PyObject *self, PyObject *args) -{ - PyObject *multiparams, *params; - PyObject *enclosing_list, *double_enclosing_list; - PyObject *zero_element, *zero_element_item; - Py_ssize_t multiparam_size, zero_element_length; - - if (!PyArg_UnpackTuple(args, "_distill_params", 2, 2, &multiparams, ¶ms)) { - return NULL; - } - - if (multiparams != Py_None) { - multiparam_size = PyTuple_Size(multiparams); - if (multiparam_size < 0) { - return NULL; - } - } - else { - multiparam_size = 0; - } - - if (multiparam_size == 0) { - if (params != Py_None && PyDict_Size(params) != 0) { - enclosing_list = PyList_New(1); - if (enclosing_list == NULL) { - return NULL; - } - Py_INCREF(params); - if (PyList_SetItem(enclosing_list, 0, params) == -1) { - Py_DECREF(params); - Py_DECREF(enclosing_list); - return NULL; - } - } - else { - enclosing_list = PyList_New(0); - if (enclosing_list == NULL) { - return NULL; - } - } - return enclosing_list; - } - else if (multiparam_size == 1) { - zero_element = PyTuple_GetItem(multiparams, 0); - if (PyTuple_Check(zero_element) || PyList_Check(zero_element)) { - zero_element_length = PySequence_Length(zero_element); - - if (zero_element_length != 0) { - zero_element_item = PySequence_GetItem(zero_element, 0); - if (zero_element_item == NULL) { - return NULL; - } - } - else { - zero_element_item = NULL; - } - - if (zero_element_length == 0 || - ( - PyObject_HasAttrString(zero_element_item, "__iter__") && - !PyObject_HasAttrString(zero_element_item, "strip") - ) - ) { - /* - * execute(stmt, [{}, {}, {}, ...]) - * execute(stmt, [(), (), (), ...]) - */ - Py_XDECREF(zero_element_item); - Py_INCREF(zero_element); - return zero_element; - } - else { - /* - * execute(stmt, ("value", "value")) - */ - Py_XDECREF(zero_element_item); - enclosing_list = PyList_New(1); - if (enclosing_list == NULL) { - return NULL; - } - Py_INCREF(zero_element); - if (PyList_SetItem(enclosing_list, 0, zero_element) == -1) { - Py_DECREF(zero_element); - Py_DECREF(enclosing_list); - return NULL; - } - return enclosing_list; - } - } - else if (PyObject_HasAttrString(zero_element, "keys")) { - /* - * execute(stmt, {"key":"value"}) - */ - enclosing_list = PyList_New(1); - if (enclosing_list == NULL) { - return NULL; - } - Py_INCREF(zero_element); - if (PyList_SetItem(enclosing_list, 0, zero_element) == -1) { - Py_DECREF(zero_element); - Py_DECREF(enclosing_list); - return NULL; - } - return enclosing_list; - } else { - enclosing_list = PyList_New(1); - if (enclosing_list == NULL) { - return NULL; - } - double_enclosing_list = PyList_New(1); - if (double_enclosing_list == NULL) { - Py_DECREF(enclosing_list); - return NULL; - } - Py_INCREF(zero_element); - if (PyList_SetItem(enclosing_list, 0, zero_element) == -1) { - Py_DECREF(zero_element); - Py_DECREF(enclosing_list); - Py_DECREF(double_enclosing_list); - return NULL; - } - if (PyList_SetItem(double_enclosing_list, 0, enclosing_list) == -1) { - Py_DECREF(zero_element); - Py_DECREF(enclosing_list); - Py_DECREF(double_enclosing_list); - return NULL; - } - return double_enclosing_list; - } - } - else { - zero_element = PyTuple_GetItem(multiparams, 0); - if (PyObject_HasAttrString(zero_element, "__iter__") && - !PyObject_HasAttrString(zero_element, "strip") - ) { - Py_INCREF(multiparams); - return multiparams; - } - else { - enclosing_list = PyList_New(1); - if (enclosing_list == NULL) { - return NULL; - } - Py_INCREF(multiparams); - if (PyList_SetItem(enclosing_list, 0, multiparams) == -1) { - Py_DECREF(multiparams); - 
Py_DECREF(enclosing_list); - return NULL; - } - return enclosing_list; - } - } -} - -static PyMethodDef module_methods[] = { - {"_distill_params", distill_params, METH_VARARGS, - "Distill an execute() parameter structure."}, - {NULL, NULL, 0, NULL} /* Sentinel */ -}; - -#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ -#define PyMODINIT_FUNC void -#endif - -#if PY_MAJOR_VERSION >= 3 - -static struct PyModuleDef module_def = { - PyModuleDef_HEAD_INIT, - MODULE_NAME, - MODULE_DOC, - -1, - module_methods - }; -#endif - - -#if PY_MAJOR_VERSION >= 3 -PyMODINIT_FUNC -PyInit_cutils(void) -#else -PyMODINIT_FUNC -initcutils(void) -#endif -{ - PyObject *m; - -#if PY_MAJOR_VERSION >= 3 - m = PyModule_Create(&module_def); -#else - m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC); -#endif - -#if PY_MAJOR_VERSION >= 3 - if (m == NULL) - return NULL; - return m; -#else - if (m == NULL) - return; -#endif -} - diff --git a/lib/sqlalchemy/connectors/__init__.py b/lib/sqlalchemy/connectors/__init__.py index 761024fe7f..d72c390cfd 100644 --- a/lib/sqlalchemy/connectors/__init__.py +++ b/lib/sqlalchemy/connectors/__init__.py @@ -1,5 +1,6 @@ # connectors/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py index e5562a25e3..9fc0ce6b5b 100644 --- a/lib/sqlalchemy/connectors/mxodbc.py +++ b/lib/sqlalchemy/connectors/mxodbc.py @@ -1,5 +1,6 @@ # connectors/mxodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -79,8 +80,8 @@ def error_handler(connection, cursor, errorclass, errorvalue): if issubclass(errorclass, MxOdbcWarning): errorclass.__bases__ = (Warning,) warnings.warn(message=str(errorvalue), - category=errorclass, - stacklevel=2) + category=errorclass, + stacklevel=2) else: raise errorclass(errorvalue) return error_handler @@ -134,7 +135,7 @@ def _get_server_version_info(self, connection): def _get_direct(self, context): if context: native_odbc_execute = context.execution_options.\ - get('native_odbc_execute', 'auto') + get('native_odbc_execute', 'auto') # default to direct=True in all cases, is more generally # compatible especially with SQL Server return False if native_odbc_execute is True else True diff --git a/lib/sqlalchemy/connectors/mysqldb.py b/lib/sqlalchemy/connectors/mysqldb.py deleted file mode 100644 index e4efb22018..0000000000 --- a/lib/sqlalchemy/connectors/mysqldb.py +++ /dev/null @@ -1,144 +0,0 @@ -# connectors/mysqldb.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors -# -# This module is part of SQLAlchemy and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - -"""Define behaviors common to MySQLdb dialects. - -Currently includes MySQL and Drizzle. - -""" - -from . import Connector -from ..engine import base as engine_base, default -from ..sql import operators as sql_operators -from .. 
import exc, log, schema, sql, types as sqltypes, util, processors -import re - - -# the subclassing of Connector by all classes -# here is not strictly necessary - - -class MySQLDBExecutionContext(Connector): - - @property - def rowcount(self): - if hasattr(self, '_rowcount'): - return self._rowcount - else: - return self.cursor.rowcount - - -class MySQLDBCompiler(Connector): - def visit_mod_binary(self, binary, operator, **kw): - return self.process(binary.left, **kw) + " %% " + \ - self.process(binary.right, **kw) - - def post_process_text(self, text): - return text.replace('%', '%%') - - -class MySQLDBIdentifierPreparer(Connector): - - def _escape_identifier(self, value): - value = value.replace(self.escape_quote, self.escape_to_quote) - return value.replace("%", "%%") - - -class MySQLDBConnector(Connector): - driver = 'mysqldb' - supports_unicode_statements = False - supports_sane_rowcount = True - supports_sane_multi_rowcount = True - - supports_native_decimal = True - - default_paramstyle = 'format' - - @classmethod - def dbapi(cls): - # is overridden when pymysql is used - return __import__('MySQLdb') - - - def do_executemany(self, cursor, statement, parameters, context=None): - rowcount = cursor.executemany(statement, parameters) - if context is not None: - context._rowcount = rowcount - - def create_connect_args(self, url): - opts = url.translate_connect_args(database='db', username='user', - password='passwd') - opts.update(url.query) - - util.coerce_kw_type(opts, 'compress', bool) - util.coerce_kw_type(opts, 'connect_timeout', int) - util.coerce_kw_type(opts, 'read_timeout', int) - util.coerce_kw_type(opts, 'client_flag', int) - util.coerce_kw_type(opts, 'local_infile', int) - # Note: using either of the below will cause all strings to be returned - # as Unicode, both in raw SQL operations and with column types like - # String and MSString. - util.coerce_kw_type(opts, 'use_unicode', bool) - util.coerce_kw_type(opts, 'charset', str) - - # Rich values 'cursorclass' and 'conv' are not supported via - # query string. - - ssl = {} - keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher'] - for key in keys: - if key in opts: - ssl[key[4:]] = opts[key] - util.coerce_kw_type(ssl, key[4:], str) - del opts[key] - if ssl: - opts['ssl'] = ssl - - # FOUND_ROWS must be set in CLIENT_FLAGS to enable - # supports_sane_rowcount. - client_flag = opts.get('client_flag', 0) - if self.dbapi is not None: - try: - CLIENT_FLAGS = __import__( - self.dbapi.__name__ + '.constants.CLIENT' - ).constants.CLIENT - client_flag |= CLIENT_FLAGS.FOUND_ROWS - except (AttributeError, ImportError): - self.supports_sane_rowcount = False - opts['client_flag'] = client_flag - return [[], opts] - - def _get_server_version_info(self, connection): - dbapi_con = connection.connection - version = [] - r = re.compile('[.\-]') - for n in r.split(dbapi_con.get_server_info()): - try: - version.append(int(n)) - except ValueError: - version.append(n) - return tuple(version) - - def _extract_error_code(self, exception): - return exception.args[0] - - def _detect_charset(self, connection): - """Sniff out the character set in use for connection results.""" - - try: - # note: the SQL here would be - # "SHOW VARIABLES LIKE 'character_set%%'" - cset_name = connection.connection.character_set_name - except AttributeError: - util.warn( - "No 'character_set_name' can be detected with " - "this MySQL-Python version; " - "please upgrade to a recent version of MySQL-Python. 
" - "Assuming latin1.") - return 'latin1' - else: - return cset_name() - diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py index 284de288ae..68bbcc4356 100644 --- a/lib/sqlalchemy/connectors/pyodbc.py +++ b/lib/sqlalchemy/connectors/pyodbc.py @@ -1,5 +1,6 @@ # connectors/pyodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -25,7 +26,7 @@ class PyODBCConnector(Connector): supports_native_decimal = True default_paramstyle = 'named' - # for non-DSN connections, this should + # for non-DSN connections, this *may* be used to # hold the desired driver name pyodbc_driver_name = None @@ -65,19 +66,30 @@ def create_connect_args(self, url): connectors = [util.unquote_plus(keys.pop('odbc_connect'))] else: dsn_connection = 'dsn' in keys or \ - ('host' in keys and 'database' not in keys) + ('host' in keys and 'database' not in keys) if dsn_connection: - connectors = ['dsn=%s' % (keys.pop('host', '') or \ - keys.pop('dsn', ''))] + connectors = ['dsn=%s' % (keys.pop('host', '') or + keys.pop('dsn', ''))] else: port = '' - if 'port' in keys and not 'port' in query: + if 'port' in keys and 'port' not in query: port = ',%d' % int(keys.pop('port')) - connectors = ["DRIVER={%s}" % - keys.pop('driver', self.pyodbc_driver_name), - 'Server=%s%s' % (keys.pop('host', ''), port), - 'Database=%s' % keys.pop('database', '')] + connectors = [] + driver = keys.pop('driver', self.pyodbc_driver_name) + if driver is None: + util.warn( + "No driver name specified; " + "this is expected by PyODBC when using " + "DSN-less connections") + else: + connectors.append("DRIVER={%s}" % driver) + + connectors.extend( + [ + 'Server=%s%s' % (keys.pop('host', ''), port), + 'Database=%s' % keys.pop('database', '') + ]) user = keys.pop("user", None) if user: @@ -92,7 +104,7 @@ def create_connect_args(self, url): # you query a cp1253 encoded database from a latin1 client... if 'odbc_autotranslate' in keys: connectors.append("AutoTranslate=%s" % - keys.pop("odbc_autotranslate")) + keys.pop("odbc_autotranslate")) connectors.extend(['%s=%s' % (k, v) for k, v in keys.items()]) return [[";".join(connectors)], connect_args] @@ -100,7 +112,7 @@ def create_connect_args(self, url): def is_disconnect(self, e, connection, cursor): if isinstance(e, self.dbapi.ProgrammingError): return "The cursor's connection has been closed." in str(e) or \ - 'Attempt to use a closed connection.' in str(e) + 'Attempt to use a closed connection.' in str(e) elif isinstance(e, self.dbapi.Error): return '[08S01]' in str(e) else: @@ -116,9 +128,9 @@ def initialize(self, connection): _sql_driver_name = dbapi_con.getinfo(pyodbc.SQL_DRIVER_NAME) self.freetds = bool(re.match(r".*libtdsodbc.*\.so", _sql_driver_name - )) + )) self.easysoft = bool(re.match(r".*libessqlsrv.*\.so", _sql_driver_name - )) + )) if self.freetds: self.freetds_driver_version = dbapi_con.getinfo( @@ -141,6 +153,7 @@ def initialize(self, connection): # run other initialization which asks for user name, etc. 
super(PyODBCConnector, self).initialize(connection) + def _dbapi_version(self): if not self.dbapi: return () @@ -148,9 +161,9 @@ def _dbapi_version(self): def _parse_dbapi_version(self, vers): m = re.match( - r'(?:py.*-)?([\d\.]+)(?:-(\w+))?', - vers - ) + r'(?:py.*-)?([\d\.]+)(?:-(\w+))?', + vers + ) if not m: return () vers = tuple([int(x) for x in m.group(1).split(".")]) diff --git a/lib/sqlalchemy/connectors/zxJDBC.py b/lib/sqlalchemy/connectors/zxJDBC.py index e0bbc57343..e7b2dc9abf 100644 --- a/lib/sqlalchemy/connectors/zxJDBC.py +++ b/lib/sqlalchemy/connectors/zxJDBC.py @@ -1,5 +1,6 @@ # connectors/zxJDBC.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -35,17 +36,17 @@ def _create_jdbc_url(self, url): """Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`""" return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host, url.port is not None - and ':%s' % url.port or '', + and ':%s' % url.port or '', url.database) def create_connect_args(self, url): opts = self._driver_kwargs() opts.update(url.query) return [ - [self._create_jdbc_url(url), - url.username, url.password, - self.jdbc_driver_name], - opts] + [self._create_jdbc_url(url), + url.username, url.password, + self.jdbc_driver_name], + opts] def is_disconnect(self, e, connection, cursor): if not isinstance(e, self.dbapi.ProgrammingError): diff --git a/lib/sqlalchemy/cprocessors.pyd b/lib/sqlalchemy/cprocessors.pyd new file mode 100644 index 0000000000000000000000000000000000000000..f10b2386c5fae88273edbd3f51087a80642455b4 GIT binary patch literal 11776 zcmeHNe|%KcmA^@t5Prl7KL(>1eMtBiS4qaJzX z|2-R9ao_bfTeMMju{0xd8e_9TIp3uA769gPe0>H1!ZR7O(F>0SSp*#l4jz+Z4iaNQ zP4Al&se-Wva5t7Q=BB~cgN|uX5L)1`#rr(4gt3RY<7XaJtSKEDSoh4ri(I6+i+KR! 
zkHlC@vFi6~UdB{}he0Qsjy4Z1<*7nC6q{m9CO!yUP6AAMsu=4pj+lacj+iSC+Fzvz z0fk&uv8n{aKF01PRtp*h_j0t9r;4#_Q@UTFUqA%p_^BdUx79CTOs*V?*=2*FdkuBt zdyDqS`hLSvj$T(kc)INhVnBp=@v#g}yL1B0h|Mq@7ZM3W&Xx7t^9k>9{O#8O^l64; zHu`1#T*JZBAJ`a>APHY)n7%xPF8LuFi~b&H+7QV9aH9OZ5Fz*mPO1&Y{un5ZvJQEqA*LtK^>h3)eHQ=08Xc zXyruPNHlaxXpoz*BEp&oleiDRy^yi^SoS(McQL>{;x6`bw%c^^hXi^Yn^Rm|CS0_4 z`3%E2-A$M7ICd3dH98E}=rx5+b^5hAdQ)LD2ynUvPV2T=qy%_51Udjq^bLF-K#PEq z`#>e9(NJQBHFXL3dXSa_n1i_<%@~ln9#0Y{zPC!O$Gy}|uE$zFEM+}>oRGVf0M=tM zfLM=j66s9qL7L5_a22-EdXyxC+yWg(Nk=l)cVXj*k7cbh)8SL}L^?dpV|{`Xau=EA z|0%`%Ys4Y*dRm-e9&wIdX~wyk;@pBb=ONB*h%=l)AQ7SMq(DTn+zd!+Bu$*hvjo!Q zC*qls*8%Rp-VzbVtEroe;}R~O636+Rkh_{dk0Uo_BD08erZ|#j#8IZac2gXi&9L=2 z&f&xHwJg1WM^7`O_bc>7^nS^sw~Z5We@mdpk-&anE#9L&gY*#Pqo}TVa9Mg2}LCJuc_Qr zUkT^_N?6Z-gtQBe=tonQ^dV9a-zy=55N=e9royT^{YAu?JE`Sxqr}#$^IZB3`4pdW z8PTb#i}y=)@d3$joR?IRZamfgWoNQKFF903VX*Js)Q1pV5S=L-nX>-;89m#A0~jWK zR-WArSyM^UGZBw?XCC5xRGTmBPLyp_SAcjqqguO9)~j+*YvuS|`HUp0^SJ~-x`bJm zxe3|WRF!es_F~2#neaTUQC>^P%hi`svQT={2C)CU)O_ISNGvlS@CBIpAPIt$T;43HuYR_b;GU*56NA?CnIsV&A%F7aT>Nt}QG#9A#W?jK1U5cR5a)a<}d7%jo-hAGEH1`^G0ISi0>sQ{;C3 ziQ67UMKT;O36bwZ1fTz3;{c5HZKk72Pwscnt1WwxhY+8?^oJ}OvW3)`2Y?0$t%gw9 zVZ-qtkqz4+o*K3n0HEk*(x)GQM?V5)IPNxu5^xXJ-UNWq^`?;F@N&(x81cR3RJ6=k z?qXPNoHKAJ=ea#>INl??+txuNyKMmiSV>z?b;ig-Ilho3_T!`v=RO8VY%}q<;eFx} z;8saEcu>~&%H7R5a(7#v+})Ay>TZ#oE1fqvZ>AJ1GwSu@F8yTX5qnoTJkV`N5thn9 z`wte9Kl^I*_quH_fTYO!ltpj>-ZkfpCE;=fbONo@UE$R&))Sm zIeFjGSI|Fs1bA8h$$tZgf0SYG8U&Jzlm6c6k@~y2$w)l_1&EYQ1nMqyD+jfiW}GUq zfFsu+Yd#@YdUD?Z7_>D)hQgYd2X^IPYy$-Lm;w}!#2>L5gSN$xqF-5+@6ne$DBowh z1QfYqX*qzl0NTC=B=LO+!{DrXeeZ4(uhS114hJ}f?Fc@aon(kU|0-Dz7Rporu;Kbq z@lcwa9_OvjZ&B_hb1TAg%h`mSDyh>)^^aWoo4+6(-OY(^hXRdVhhwuV9CrbWO)onJ z^2z;RBW{{4{!wNO$86g?nhfn4#*jWtN1(1_+8meuf_?<6lUVYpVOV7Qh8_$~vI;O6 zlNC2IVppI@ejU8B!!#@dykuwe6T@)n1y5id>Y;pw#c&$0bQy5O$ckN+)Vc@49}drd zap3{|C>;#73*e)Qq2wd>S<^4e*O6?@YIUJBXp7Rmk4Hy|+q=V~4|NaBms z3iJ3^Sf`KS45z=3XvA!ZDAvQJ59&zamqZ2D%!2PZssG6^DDFME6|i#h6Np-8hoi`N z7e8|R0Ji}As4mCH@I#INTzCq!k-yoMgRmurF{&5R7jo&Aq_mzpg;SOb3f3OKh1#g6 z@1w1-ijE%Ti3c#AiX#pf*qUT6ZTmIVIXZF}jw?;dGoY9TZ{Y*>Q10OHh~J`#>*$9F zg4_S$u(1#HqCE$x0_X5>$UOz~=RjM}(zyU5=g4{%g*?+3lH+@FB<^#+MlVKTWf!&Is5t^7t60jVqK`;IMY>mSe>prbF!Xy zRSv0kr!gSM2a0Md52$aE1$`jp#7Z%7=>s^THXJt$ob3|dMu0Hqi|a2NsMCjtvXMdt zl3lMV97tUB;ZXc|#z-cw^N60Ys5uX5x9A`Edjo&p!QX9olYUAroSv5cGu;)%vJJ;n zFwE~f8UK7U{y3M=))w~{mN5BOrJLVa{1aMTDp2x=IBnRrf<1Bv9jkHdc-amfofNP? z0&lK9j@}gfeiDInxy6iw;KG=4SJ^RnCuW8Rl!e3c&N5-nRr!X?UNh|KE+~ez zx({Cgb~(AlKPtZTnDxC-WGvG7yOqD2@vd5l8B4PMU$Xvdxx2nd)-&{+?)t*SyZC-6 z4SDq%&w!--pzU9Z%&*s!N;1quegJNsVfewM7z~xYcuzU@xqkbD13Lbh;t2Vd`TVBb zwB9e^4gnt$@Vf%40-h4k^11IS6KfIl1_5OO3k94l;73CKlz^`ZxL?5M1$>$As)mmi<&_35TkH z4Qc&S(NBk#Txn*@0$V1tX;WG6xs2^bD=B4c!OSUa=8|k2CerA1@eCRRFIArVKsLy? 
zH5gAiA=97F*v0t8vVnAB8max{n1Y$CVDAi8piO55P19Jxk{ni0dfw={#~njL#(kb6 zhdID^_@^-k_>P8I{r`>QCJHu-<32&bMaHOP!gc81j=n?&@nqPuncY8?+2N19bVg(r z#SOmFcsjkz!39qzT7@M)eF~cn+3BU(k!d2-RWskV+w}Fd>UhRPFba>VT*D@hP$&aqZ(UebaH6F%hLMUT*jO()WV_0M0yWmYdO(SD}zzoEUs=dMBvRKHc1;QbaU^~j#x^aDC zO3#79-GOJm+Zo#i4fSDvEU0*#e!r=Uv1?$f+8c=yxvo7DRuPMpSy+O)P(U-qmw2vR z!d@V4oe{;;pm_bA_@4@^f1MIi)PQffawm&)YHi_A=~c!4U=aTSX^TejU!$$~q0x9# zbwfo7CtsD}Q9={~Pb3@&X^P60XL{N(qp;88gYlO#JONk+qn*7xj(ZgL^C_*Wg6Nc6 z8vLG642BkGc>Fx5NsVtYkGDN~n@8ygU^d@m9#6F=8c}?K)_~8`<_-CSFw0gEJrZg5 zMYLssAbD~ZD88WL4S6)0X&}@Z_Jq9cN)#ygAundNHOmtV!P(nc7o;&!WF47Vn(5)x z6PX@C>7UXVRFnvNi`4|8-Zep`sZB-fT!A&JS4GT&!6@6Fbz@9XJKc)fN_iOaDb?W^ zkIEyN*C|?4z;`<*Fm@mV{c8G1LYk?l4qwY)3e~NKeM&S6@_HKE*csK7_NG9);*4T` z3SsPq+J@z|uCh{dh0^P(^?%6?sJB%6ttO-ILUW_h9<>ILF(3Z_n+%54XmO;|&uH0F zXz9wo+>_LC%5>1NGXD#7B zdld~4l%l+tNtY}tE-fjEULq})?x1=m;qL+Hj{v+v0M`iSvuH_d@uE`RB*3dhD_(SE zjIC&7?6d7$Etz&sll$Dhq}xujo1$;rCe0vCj^gmSVvsU_9+1iPO|;941Z^956xu9M zWUfi3ub8)V`IX;zq31;zI3{kdw(up`)W)-@H=BYt(}!$I|MylZ+br^ ze{duTU76Wp@@h^0Buh|0cm5X}$24=vu9ZLK!mlLbERmtJjPGRX zB(YwT@&B*%iTfO{UnpDHnCLIlH03r$m9#LnDX|eI_bJplA=fGDB*ai4tzxg@P^?68 z`0!GZ zg8s08qiOs;K|e0w`q>lhj|zH`Y|@A3OyrjfI-mS!yv&~lk}r&mz{wBmTW#%+QQ9C8 z6EFRRB%Kh6+bVvrL`BA>Y+BrkwWi6%^;A3Eb(8H|lhq~j;*zdXtt;C@-;xfuzKW|= zT=lL)OW=fum@#cOAaKNo{c;ZWYv70lJ24mTl!6D?>p)!vjy;^^&cmM<0+#@PhL#T; zdmzihosdLyz;$Rvz*Fyj=+0;utsHcMb8&ul0VjApnjbi7A=`nr7I^9%5Zw(uh;~2d z1b>3I2{`V?*hgqPfPVtG5_dW~fj0y8;BM!2;PiK&htTrN&g)O>EqWdAj2{s6v?t$(R zIQz06+#k(^4?7;is|v9K*!S4bW0B1rn>TLm-Td(8zRl%Z+*^jWBo(YS&mi6_M?kG_b~%Dpk90Ki~WA zn@oOC@$5gl=lJx!@4esq{dK?hd%xfNUWTT-x3g@**(W|KCr)#o^%T-*Y&?;o5+2Bk^9y(&h%n z{NHf0=11 zz)tO3n60-m){4Ii{mq@}A8@)Opo`}!iEMLhjgnwye3oL==9$eEO5&?UOurVOnCVvm z$V|Tspp5ASiUAX#Oc+ye4~a@jK`wf>De*o;#`6`UE>AIzLLN*{vkY06$9SU!b8b_N z*Bg^7Q;P9?WADmgG*7ypq3wX)5%gq;z8|8|?MHW{$vBASem;qx$mAIPONv46GBGoP z#y3m1fnVvZD>9{TaFRGlso@3|bOWOpu=A&AB#xMvM%z7@QMbiKQbdzn2Pwsh;k=8O zmb&xw*$|WuL1RBxOb6*xXgQieCWEXxlPsIaz9qM~glv7Y zoQxbbrM_#Zah}#`j$#Bcy~Kd59;$j74xlBkFeQUX60=M!JGuNWxcnt^ZH3ZvB7Ujm z_lgWoC0Xz&3GdZQt4--HE`A*1^?RZAdt5ET<7O^17c$S2%sV0TIG1Vai$J%H(;YLV zSwsihO347CT*0+d08I^mvCE-a1*69}>M{Bi<26${iJY$(u!N!`)5^)N;k{al76e^a z5m;VFP#W$c#e_P5L^UJjM<$||hpmCNBMN}&6o_cZz_M`)2%aVb!A=xq7Dd@K3M;y4 zLGDq^owW|eGR>ydDZb0y>Rv~7yNwZ#aY`}%Xi9%uf@n5|JVwftzKyo0CZ(4vM!`IC zMpCN3iLrW`%IpT?m!=d4g|&*!q{)4+`^)Mfij~@)6MA8zfkn6*B#Mz5o6CExR!`|U zOBrMIIO9@pgH*%?ExTe$)trWw-PL`H2(as22X^j7^NLumV%%3emZK!^D{e%9PYldz zGW?|&X-o`YPc%#nfMW~i*izh>IP2Kl;JT(3Q=D6>)D--BK4bdjiZQRDs^64eLXU!N zwbDClY_4Kt^Ie;bo{~6}2f=wtVi54Gqw6^(k;_ABIacsEm6f24q%m|oFhfJkS zG$5NHBpSecj3=P6=QCGlXh<>E$coWe47)~~|JyVtj}8rqcoS6a{a9X>S`0!fy;9o5 z!Xt5DAAXJY;ZLu^+z|F+&$K}EQQ9-cpkjc)BA?Jgx)pO#54a|f87+#RJX+`?TC|Bs zQjFX48mdk!#)n7{?xbGoP->hn;b0KI9_7Q7o<-9bNt`!bJ%?elDg6@=e5Id87nA)G z3OZQ2iRD4E!jvLJ!mEb^dZ3ch6968hG3ouZWv=d8(=_Xi-yNU> z3Hesq2YzfLKDmV$Z0I?yBN=ljH#Ebv3LDczSLH${t@lol)xC!!!I{UAc6 zIDF5lqFh_`20ZSf84Gwu{pMR!>3KQ62*zASmM5j3Tl!4tR`luEF;UDBl+V$qRgwp| znUtI3^Ee$ZkLk(}QBQGEcKPondZa(hpLX6vsXn2XAzt{@Q+UTnoOkH+VGpNIN?UAu z9tS`4jD2xhuo?|Tuma~ks*p_xjHQeYh;DvBG-oNjTZ(M2Sv5Ru~+qf(WSRr=gH^6LH0sl z+TD%v4ED;OU@uN8WN+E0vRAk4?ZoRS_wPTTG1b3Ll}2rQzhc{4mSHcBN7MX^`1>>d zO`O58&)InyBqs}QqvYG`4BtS>c@1zq{hJ$650>9Z!;%R+s5Yb5Q1!xKeX0Bhd@C`f zYls_8yIL`wA6UA%z?=%6)XN{gx8>)c9A zz0Ugy;P~uJ);Y!&Dv5J=L(laK43E)eO46wUF#NQg6J|>pTvWrWiSk=#5`7 z*5{2CDK&@mE1ya6`I?uIx^czzlac{Y#@`;xb^ij~5cW9wiSt>y>lvKMS?V4qW1~OB z5Mps3hD>SRrBp3XD$v;dcU*eGHa_qngfTBL{^}J0+KCY79ER04h3+)VxEPcTjo6c{4D|(q**(`T|J11&!3hAcn$Anv4N{ zcHyr$h8Ana_7$rSW z)CA#k>;bp=N{QiJM;0aV$Lr+}PKj$%ETc!{FME 
z{qYx^dV8AMD45GJUl$md4$hs_GmePQKdq(JslNd~wi110!0Y-}PVIc1r)*DYtto{p z%yYn43R^hCA&MQmoZy>MY6bkGz`6Y&jF?A&M|sFNYb8tGb3sLN*%^SbLV6`bSY(;Q z^g^c8ycs=f`Gs79_s(>UPL>?C_y#V5k4mdz9DJtfZpJ#6=IPm0{f88va;uVkq^kd+ zrSxeyKKH;JnBK8;O|k2-{`U?Id3^3$k?TFi%bx6mZr9Vbmo>PaUY~nM(!+`y5Kvbe zYvy`tUXJGDK$88ur)D_na+?R0#KE$LnxoNEq`^2y0U$2&g~vGP!S|0hOtaB=A@vnF z26Mf(>4t-i#xP;Fk*k1m8MUPcQ`dYjoH*$i%i{SRUNaYs&&8TAI>n#Q^5>8Fb0?l8 zpMnbKq-Ed6s<3#jDXqYOwZGGT7vFdc=PQed$zNK*l;$H$?#bW zk3FE^b0FR=U8I1DsVTiWhu*tJ8mms2(r`w52yLRSwg(RZ;Q>#zr|N{VA2Y)R%)$|6 zf0a<@sd>}msvq(6mMn+1#t&XWtDel_*@!PWW_iiAGOz}A9NlowRSB9>Ai>216BY(3Nx*N30`WY$)Kp} zg@>x0_@?c8?4W`FEONN~|MRs3f0v-^Y5@xcJXd4kM+6)e@D4F!0!@phky?V z=oio<;B5llDByenFB33Vzz@G_P@5OAA-Q2|rJ-Zs&<{eB_nFJac- z&n`r80lXu}!E$bAY=JY2IhWx0gM-vx;@(yX936M2;2e2bEbpnD3P*kp%WutN`IWg5 zi#L}soRunZ_Y?2j95&ZK3;dJuj@&GkTbWhiD9mMrOY&J^W&UtpzmNfW!#JAXh+{sh z5;pl~^S%aL5#caI;}d<@3>r3t*Ku#eUqavsv+yJXTzJ+3=OmnmbWHu<_8vTPre%w;75b6AO<&q`oh z32ZB=ykh)wC#7LQ(|m=L$0UqPn4^SoY3(KbKU-)WL|4j;-2{g`i3Cfq0sSh#ljzgs zaOE-=>?C_#m2)DO2))Gb!1xK!y02yIk2ZaVA7+cVuxU4dX9sQ>^@5-sH`ctZI7oeg ztL2X&?7M!v)ojeb4Iqcq0#}xS^HW{}?n2jRdD8OCXL-`{!^P&we`d{H4biCDqUkMq zG!WXr&Jbp^PhA@i>48oSgW)J+?>m~iR=0me^XY1xHyCWv^o?OZn*~~Tbc5RLjmEUr zcqFJHBLY_+09|WWL}Q!4=Ly6_?+-#Y4OxO$Wi@xTX!pmpkWW+VHfp|2jLjBO*M%Zp z-zH1!lI-R#_4arm=)cn&jBBh8avM7%;i#@!pegK+L;d;5c3LL0+0Y`aU!m!4U5|>% zvOi3wUmg$n^guWSV$3VXSZ^_GL6ImmzhdY|J+mh21D}iYY z>8d;C31eYdoaFFD21+2C#^hH9N$-SNnYN zXjJnv)=Bok4%?iE?LKE{5w+Zn`wDSiw1m{01>fy6C;hW#!yW6A|P2Gtg-@2b^HAy znAkds#l}!Tw}zIeUtGctgP$g%uGPH$F2)Yo{S_M5L$MI)!lnsT-cs%l1{u4>p(5z< zb0akp4uoi-S7oW4*jr(r>Wk{^xI+y<2L?LXOA~EXV~^)_L^bTRYFmO|4aFhyCWq?R zCP_TSRBvZ&gQ{%~Aol)>scM}Xi)g+;N5H3U^oIOFim$r~ABn8@MfBx?AX)NNV0`d# zNY!ahflxAB^90(X-e?!4mKfWYy(X?jyPCCV2kjaxL|quk2zKqr z!g{p^e499ev7-+3>nOlNx+SO)kv5otZH|V0S}X?iR^n~xiedA#BCok)n5#xDwz6UE zss>M0rL`{8E3Ez7tfx?e={#WHd(nR!_mjBC=VQ$Q8S_Q8SUjlXS7w{L$|GHVMyr^K zoUY(A%mqELdNpJC9}yPP9#mD{CSp~MX*zbPt^t{GW#VOok9@l~rmYP>h&b5XCCB3J zzMwZ2(_(UGJf_Q#AwNL%B*tkYUJ9feBGR88vxh6gwL?INmTH-QT{V16jQ`HTV<`ug?=nW#Nb#|3&p$&nMh99C) zcnLA2pGTS+#m-0Cq8de+lJ+kqns!dvsa`h$pt0T-G-tr=Y^CaQK|TeqW>qn|mS%|3ix3BmsX|5|cf zOt;d!3Za}C4|ivKU_(3{kIBLeN<;GEMdg(h6|u$it4+%vw3G{EJ=!HNj>j}L=xx`6 z7!UKhAcsR359075^N{JpDwkUtkVt}lnSKX~q=jg*)d;sx1PR5qPQhpWc+?N6VA40j zCM$Xt%U-+a5M}0kXIj6td^K^L^49a;HMY(u$y%(5XrZ60S|y5Ba;gl{TQ}W4cy)?# zpyT=`k4#mT5!Exwoe1^m;w||zQcP3BcgGBNTU%v{Y;O0ok!do;u0kAItHY-HYIQV# zUF8k(O`uyRSn{7bVKa3Z8AraRdYG{lczjb{SgCc%dRQ(?v)za#@@t!EQ}K<>rQjQ4 zz4iTJ=M@&jJM&hmC(Qyc$k;izg+F=Y@0&2S@gGy@h1h`Hid4d=-18{as=Ev zh0fOhNzlrg4F1&P(GcGNB9TV52(~14nC6$;;S~DGb!OI-y-27s`IEMwip1rQ*c2zmVi&P2m;BHpKtkKwx-zgAqF{_RuNsc4Gc;ZGUY z<3$&zcdFeN+mCrWG^%UJ51YHD7HbH}DBoCPdo3r@Jg>h8Mq8J%={FXS3&Ve~k73V2O->Hn9YA1YEZO-w@HJe>?IW+rj9njO}P8f4g&7N{TR^zz9HIe3mN+%?w!E*1HO%$=uZQFw-oV*Hhoup1-Fbg zeN(0Hq|_#ODZYbJo4$v-MH{oc_^X03{FRS2{|Jv-%mrY0*T}AuyHdN(?_#_2b{FlI zcbDz1*j>F_+1hm|tM%Fu0_v3-Eln&kHnf5ySSY}tX! z*iYskS-#Js9a-MFEuK+R>EzaQq*skb5{aa#ZZXufl~CgewdM8>wKo|vYOAU${jTY! z?!W%o$*XTYQ|zZ>!)GQiJ{^1M%vniacjg5-K6vIe312?*YYBfL;hpj5HqxCNX}FcK z*q6O5{LrWGD)P>=Yt;oM^AR)!?ofZS7_eI6J3RylFJa6}AG9K5n=zmyp*b8!$<+mQ z?%1J(uVajZ8(YCxn3RjH31jKdC4hN#k@rEM%9uLa&K%uZ)95on+q)1S3X$gR&I6ci zDr1{#)3JydVeH%UKmvhPpkIzYt2Lo4Y8^4QjeORjQ!GHQ?rI2qW_N>;MubcTs+V7n!hWwpOVjWntv6I!#uD2 z+cJlsaxvO2hHT|g)X>17H7_v8vj-c7xga^=9RQiLIVMg<0M7JX1qcj77pk&Z)%3Uw3XL)%U0ZB5? 
znSO37KFK?e2&|8hP0GNGJ&3%==uC`9ZDky11Y)$-T`IndY-P)BE6T+>#QLI)^-{Jq zF+!7n(N-S#K!y?YA2vKQ3od@f1^ISr5^Hh;Tv74dF6+it{4^HMqWpkLF|RDDArjVP zs!Fb3BWUv_4LLX?Y1Yd3q|YvO-JT<;nCmOwiJ-5bhlvGR-Sva)+RpZal}6?p*bSYDK70a)Y-Q&xWA{m8 zvvN0faijA{%X@F6aP!J}cMp$^adD96 zx+-{XTQ$%1E!J|I)zF5}XF_+8+ko9JPHAEy_=<0!0TBrA%Lq6);(KHT#WUI>&gHz% zgJf_~wz5v1fL#@n)jT+CF5O>{Q~6;fxjWB&&(!RLr1kn_dFY>^4P%dCoSiB$efx0= zvx4qrY+^51TvUAzEL$0Yt|m@#c-;L>X}xNPWc&z>cJVt+oadszvu{?k1<$58^2Ym_ z`IfCb3@&eMW#$`Pw8P9!8dJR_3CXoG`vhG4rL+{2ysDKLNRdvAwcxv!R|}puzpsgR zHSzl7x7x&*ty~Y5t*n+dyl>-8fV}?}Ah-4vd|QI&d;_mjkPodrit)rNzypI5F9T%X z!9hF%WI9jc92avlwF@sdH&c(m05auu6Ll|!!4Y$zlc$@pfRmp@^=zh4@>OjBBi`E~ zLuoBs4t8+F+6{s09D&6E1uCo&@AZ(v@ymke=$sqj4|!LB!W-AF0Wg=t+G-$$FGCm( z7qyFn2S~h49I=%};Mm?1OYn#$NA6bbaZcN?CSthsvIo_W9{POfb11Eu+DggV3U#8U z>e|G4@s1{5`zNxI>nh|F0~QC4TbDN~_X4vj11CYAcmZtW&Gcs9DX{{$S-*(%(1}-J z9~0v_Y?XnN=0`N~Gw}*mr?BStwp}FqcITly&02!VIIOJgv2Mhr@d@w($4Tw?nBDZ? zJoc_8Y8Js!JB%+VGD7x=0ULqXrPj^UR)-;cVf>?qRvZ?;#EU{TuSSf&z^jsCtS*Wb zW1TF=tTy@85pM<$avM#!XM{Y~aSYP&R2SML^9D)mNYh^}cVU~D692-*TgZmxEhJ^c zBLXG-A}s+(-X#@=X$p7adv;afR^*2xxq}zR?NQKI4Uf^|t3sxO z=W8CI5b|sLDC*0&D5aE_*vEKwxPk{?O~2{ng;Fwnk=|X7Uig2ZS3I@37~0>yk+JT^ z3bQmYILxE`7G63U7~IuJzk*o{4=h0V-HmrF_B}fIAH!o>G;|A&a!nl9N{2$eXPd5Q z^*!5F{^`8N7Pq3Pt`T=E)Pl#;m3Y6EKCcCjrF|iLh-ZgZwFVETPm>2RL`C2>%7-R~ zw0u?l8@Amh4i)Y}axmX(+gA^@iDN{0n9>E3Pc-?53d=4W%bxN~mdG<6nX^}J#=&Z@ z+#VaaMD)#X5l7@K)RvG+zx_P4gpYvS%DlR;tBG>LOyotR-B`8m>V)n$|zt|K+gaEkaF2=fx^ zGx~58IJBz)DIfGbIwbJph$H16wm)75uD#B&z0igK`7vkw?=Bp3;nOaB!iB|%zTu9) z;=&y+{P9Xh;-CwUyYN>oJnO=@U0CMoUFkx<3vY4ZdKac$SoFWg)!*U5dtA8Dg_;XL z>B1XaxYUKrg}?DT7ROy!wEuo^@Cvq*Zs-r%JY^nMwuQ0f-V)|rg9`<}$KHmXS`ONk zW$a3`oUys|<>hR747cv3cntDHC;nW$|1%EvYIC%6~muK_k;OwHpfXTI1x=34_;zaX{9^-KD+rZ?l6^*!{@7Ue6;SS4gD>&sK~ zY5hLMSW$KakG^-%cS4q+oD29KsZ$|!`exPC7=L9MiKwC9YM5=N(ThKG&Zc(6zi6<_K)HR(Jx0{jc_b4u z&2)#EjwiM6ASdx~Xo7R^@R$iuu z`}Ot7gaHp$6n~VR5BX4ftG*$TibS_dUu;7ugxV5uvppHJdW<#tO>0=H-`ti=)Zbhi z>*+ytdUQ+*e@53+$#}vv(rkT+-W%z`pHcN_+GHgv=w(M|aNS@}mUX8MgRzF9 zg_xeOVDjT0J!V{F@ifyTy_v1L(HBRYpJlqf0iTSP$H?jYLnSx}qs_ zUA%`Pxet_Rj}b}eW`D{+9NkGh5$QEDK*3K$5dY()x|Kk%+t~o5F(qV#+^j9pCG|{+ z?o!%2KGS?>W1XD#U|C0xVWik;))LP|w)7aC+tNlPrp33UBkBHTOh3c^zVr^uNcV@0 zbayh{8^OvqCoP$w@0OqhI^)splEBzu55~<_I&CCO$52}gzy8glhSSNYk;#C(lXN@! 
zGbo_Wc&`!4V6FyX?6%fB*SBhcdS_i`c53l|4i91v(|J{cn+akF3Twh~d3hzOvVa`~2)&uPYrPxm9AQ;&DQL{IEL z(>Ck09e~f^KHmqN z{(tut`rE)6?(C=0%W#hp96=8Q9|ffUQTge91#EEfLAtMT=huO*0yd+Qd<)^G1k)}~cmG}&|C6f{l`;G=l{M_erw;Q4*z?%8AA904ARo3r`waXy D4d3^w literal 0 HcmV?d00001 diff --git a/lib/sqlalchemy/databases/__init__.py b/lib/sqlalchemy/databases/__init__.py index 915eefa4a5..0bfc93747f 100644 --- a/lib/sqlalchemy/databases/__init__.py +++ b/lib/sqlalchemy/databases/__init__.py @@ -1,5 +1,6 @@ # databases/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,7 +13,6 @@ from ..dialects.postgresql import base as postgresql postgres = postgresql from ..dialects.mysql import base as mysql -from ..dialects.drizzle import base as drizzle from ..dialects.oracle import base as oracle from ..dialects.firebird import base as firebird from ..dialects.mssql import base as mssql @@ -20,7 +20,6 @@ __all__ = ( - 'drizzle', 'firebird', 'mssql', 'mysql', @@ -28,4 +27,4 @@ 'sqlite', 'oracle', 'sybase', - ) +) diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py index 974d4f7875..5653f5b659 100644 --- a/lib/sqlalchemy/dialects/__init__.py +++ b/lib/sqlalchemy/dialects/__init__.py @@ -1,11 +1,11 @@ # dialects/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php __all__ = ( - 'drizzle', 'firebird', 'mssql', 'mysql', @@ -13,10 +13,11 @@ 'postgresql', 'sqlite', 'sybase', - ) +) from .. import util + def _auto_fn(name): """default dialect importer. diff --git a/lib/sqlalchemy/dialects/drizzle/__init__.py b/lib/sqlalchemy/dialects/drizzle/__init__.py deleted file mode 100644 index 1392b8e28f..0000000000 --- a/lib/sqlalchemy/dialects/drizzle/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -from sqlalchemy.dialects.drizzle import base, mysqldb - -base.dialect = mysqldb.dialect - -from sqlalchemy.dialects.drizzle.base import \ - BIGINT, BINARY, BLOB, \ - BOOLEAN, CHAR, DATE, \ - DATETIME, DECIMAL, DOUBLE, \ - ENUM, FLOAT, INTEGER, \ - NUMERIC, REAL, TEXT, \ - TIME, TIMESTAMP, VARBINARY, \ - VARCHAR, dialect - -__all__ = ( - 'BIGINT', 'BINARY', 'BLOB', - 'BOOLEAN', 'CHAR', 'DATE', - 'DATETIME', 'DECIMAL', 'DOUBLE', - 'ENUM', 'FLOAT', 'INTEGER', - 'NUMERIC', 'REAL', 'TEXT', - 'TIME', 'TIMESTAMP', 'VARBINARY', - 'VARCHAR', 'dialect' -) diff --git a/lib/sqlalchemy/dialects/drizzle/base.py b/lib/sqlalchemy/dialects/drizzle/base.py deleted file mode 100644 index b5addb422f..0000000000 --- a/lib/sqlalchemy/dialects/drizzle/base.py +++ /dev/null @@ -1,498 +0,0 @@ -# drizzle/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors -# Copyright (C) 2010-2011 Monty Taylor -# -# This module is part of SQLAlchemy and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - - -""" - -.. dialect:: drizzle - :name: Drizzle - -Drizzle is a variant of MySQL. Unlike MySQL, Drizzle's default storage engine -is InnoDB (transactions, foreign-keys) rather than MyISAM. For more -`Notable Differences `_, visit -the `Drizzle Documentation `_. 
- -The SQLAlchemy Drizzle dialect leans heavily on the MySQL dialect, so much of -the :doc:`SQLAlchemy MySQL ` documentation is also relevant. - - -""" - -from sqlalchemy import exc -from sqlalchemy import log -from sqlalchemy import types as sqltypes -from sqlalchemy.engine import reflection -from sqlalchemy.dialects.mysql import base as mysql_dialect -from sqlalchemy.types import DATE, DATETIME, BOOLEAN, TIME, \ - BLOB, BINARY, VARBINARY - - -class _NumericType(object): - """Base for Drizzle numeric types.""" - - def __init__(self, **kw): - super(_NumericType, self).__init__(**kw) - - -class _FloatType(_NumericType, sqltypes.Float): - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): - if isinstance(self, (REAL, DOUBLE)) and \ - ( - (precision is None and scale is not None) or - (precision is not None and scale is None) - ): - raise exc.ArgumentError( - "You must specify both precision and scale or omit " - "both altogether.") - - super(_FloatType, self).__init__(precision=precision, - asdecimal=asdecimal, **kw) - self.scale = scale - - -class _StringType(mysql_dialect._StringType): - """Base for Drizzle string types.""" - - def __init__(self, collation=None, binary=False, **kw): - kw['national'] = False - super(_StringType, self).__init__(collation=collation, binary=binary, - **kw) - - -class NUMERIC(_NumericType, sqltypes.NUMERIC): - """Drizzle NUMERIC type.""" - - __visit_name__ = 'NUMERIC' - - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): - """Construct a NUMERIC. - - :param precision: Total digits in this number. If scale and precision - are both None, values are stored to limits allowed by the server. - - :param scale: The number of digits after the decimal point. - - """ - - super(NUMERIC, self).__init__(precision=precision, scale=scale, - asdecimal=asdecimal, **kw) - - -class DECIMAL(_NumericType, sqltypes.DECIMAL): - """Drizzle DECIMAL type.""" - - __visit_name__ = 'DECIMAL' - - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): - """Construct a DECIMAL. - - :param precision: Total digits in this number. If scale and precision - are both None, values are stored to limits allowed by the server. - - :param scale: The number of digits after the decimal point. - - """ - super(DECIMAL, self).__init__(precision=precision, scale=scale, - asdecimal=asdecimal, **kw) - - -class DOUBLE(_FloatType): - """Drizzle DOUBLE type.""" - - __visit_name__ = 'DOUBLE' - - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): - """Construct a DOUBLE. - - :param precision: Total digits in this number. If scale and precision - are both None, values are stored to limits allowed by the server. - - :param scale: The number of digits after the decimal point. - - """ - - super(DOUBLE, self).__init__(precision=precision, scale=scale, - asdecimal=asdecimal, **kw) - - -class REAL(_FloatType, sqltypes.REAL): - """Drizzle REAL type.""" - - __visit_name__ = 'REAL' - - def __init__(self, precision=None, scale=None, asdecimal=True, **kw): - """Construct a REAL. - - :param precision: Total digits in this number. If scale and precision - are both None, values are stored to limits allowed by the server. - - :param scale: The number of digits after the decimal point. 
- - """ - - super(REAL, self).__init__(precision=precision, scale=scale, - asdecimal=asdecimal, **kw) - - -class FLOAT(_FloatType, sqltypes.FLOAT): - """Drizzle FLOAT type.""" - - __visit_name__ = 'FLOAT' - - def __init__(self, precision=None, scale=None, asdecimal=False, **kw): - """Construct a FLOAT. - - :param precision: Total digits in this number. If scale and precision - are both None, values are stored to limits allowed by the server. - - :param scale: The number of digits after the decimal point. - - """ - - super(FLOAT, self).__init__(precision=precision, scale=scale, - asdecimal=asdecimal, **kw) - - def bind_processor(self, dialect): - return None - - -class INTEGER(sqltypes.INTEGER): - """Drizzle INTEGER type.""" - - __visit_name__ = 'INTEGER' - - def __init__(self, **kw): - """Construct an INTEGER.""" - - super(INTEGER, self).__init__(**kw) - - -class BIGINT(sqltypes.BIGINT): - """Drizzle BIGINTEGER type.""" - - __visit_name__ = 'BIGINT' - - def __init__(self, **kw): - """Construct a BIGINTEGER.""" - - super(BIGINT, self).__init__(**kw) - - -class TIME(mysql_dialect.TIME): - """Drizzle TIME type.""" - - -class TIMESTAMP(sqltypes.TIMESTAMP): - """Drizzle TIMESTAMP type.""" - - __visit_name__ = 'TIMESTAMP' - - -class TEXT(_StringType, sqltypes.TEXT): - """Drizzle TEXT type, for text up to 2^16 characters.""" - - __visit_name__ = 'TEXT' - - def __init__(self, length=None, **kw): - """Construct a TEXT. - - :param length: Optional, if provided the server may optimize storage - by substituting the smallest TEXT type sufficient to store - ``length`` characters. - - :param collation: Optional, a column-level collation for this string - value. Takes precedence to 'binary' short-hand. - - :param binary: Defaults to False: short-hand, pick the binary - collation type that matches the column's character set. Generates - BINARY in schema. This does not affect the type of data stored, - only the collation of character data. - - """ - - super(TEXT, self).__init__(length=length, **kw) - - -class VARCHAR(_StringType, sqltypes.VARCHAR): - """Drizzle VARCHAR type, for variable-length character data.""" - - __visit_name__ = 'VARCHAR' - - def __init__(self, length=None, **kwargs): - """Construct a VARCHAR. - - :param collation: Optional, a column-level collation for this string - value. Takes precedence to 'binary' short-hand. - - :param binary: Defaults to False: short-hand, pick the binary - collation type that matches the column's character set. Generates - BINARY in schema. This does not affect the type of data stored, - only the collation of character data. - - """ - - super(VARCHAR, self).__init__(length=length, **kwargs) - - -class CHAR(_StringType, sqltypes.CHAR): - """Drizzle CHAR type, for fixed-length character data.""" - - __visit_name__ = 'CHAR' - - def __init__(self, length=None, **kwargs): - """Construct a CHAR. - - :param length: Maximum data length, in characters. - - :param binary: Optional, use the default binary collation for the - national character set. This does not affect the type of data - stored, use a BINARY type for binary data. - - :param collation: Optional, request a particular collation. Must be - compatible with the national character set. - - """ - - super(CHAR, self).__init__(length=length, **kwargs) - - -class ENUM(mysql_dialect.ENUM): - """Drizzle ENUM type.""" - - def __init__(self, *enums, **kw): - """Construct an ENUM. - - Example: - - Column('myenum', ENUM("foo", "bar", "baz")) - - :param enums: The range of valid values for this ENUM. 
Values will be - quoted when generating the schema according to the quoting flag (see - below). - - :param strict: Defaults to False: ensure that a given value is in this - ENUM's range of permissible values when inserting or updating rows. - Note that Drizzle will not raise a fatal error if you attempt to - store an out of range value- an alternate value will be stored - instead. - (See Drizzle ENUM documentation.) - - :param collation: Optional, a column-level collation for this string - value. Takes precedence to 'binary' short-hand. - - :param binary: Defaults to False: short-hand, pick the binary - collation type that matches the column's character set. Generates - BINARY in schema. This does not affect the type of data stored, - only the collation of character data. - - :param quoting: Defaults to 'auto': automatically determine enum value - quoting. If all enum values are surrounded by the same quoting - character, then use 'quoted' mode. Otherwise, use 'unquoted' mode. - - 'quoted': values in enums are already quoted, they will be used - directly when generating the schema - this usage is deprecated. - - 'unquoted': values in enums are not quoted, they will be escaped and - surrounded by single quotes when generating the schema. - - Previous versions of this type always required manually quoted - values to be supplied; future versions will always quote the string - literals for you. This is a transitional option. - - """ - - super(ENUM, self).__init__(*enums, **kw) - - -class _DrizzleBoolean(sqltypes.Boolean): - def get_dbapi_type(self, dbapi): - return dbapi.NUMERIC - - -colspecs = { - sqltypes.Numeric: NUMERIC, - sqltypes.Float: FLOAT, - sqltypes.Time: TIME, - sqltypes.Enum: ENUM, - sqltypes.Boolean: _DrizzleBoolean, -} - - -# All the types we have in Drizzle -ischema_names = { - 'BIGINT': BIGINT, - 'BINARY': BINARY, - 'BLOB': BLOB, - 'BOOLEAN': BOOLEAN, - 'CHAR': CHAR, - 'DATE': DATE, - 'DATETIME': DATETIME, - 'DECIMAL': DECIMAL, - 'DOUBLE': DOUBLE, - 'ENUM': ENUM, - 'FLOAT': FLOAT, - 'INT': INTEGER, - 'INTEGER': INTEGER, - 'NUMERIC': NUMERIC, - 'TEXT': TEXT, - 'TIME': TIME, - 'TIMESTAMP': TIMESTAMP, - 'VARBINARY': VARBINARY, - 'VARCHAR': VARCHAR, -} - - -class DrizzleCompiler(mysql_dialect.MySQLCompiler): - - def visit_typeclause(self, typeclause): - type_ = typeclause.type.dialect_impl(self.dialect) - if isinstance(type_, sqltypes.Integer): - return 'INTEGER' - else: - return super(DrizzleCompiler, self).visit_typeclause(typeclause) - - def visit_cast(self, cast, **kwargs): - type_ = self.process(cast.typeclause) - if type_ is None: - return self.process(cast.clause) - - return 'CAST(%s AS %s)' % (self.process(cast.clause), type_) - - -class DrizzleDDLCompiler(mysql_dialect.MySQLDDLCompiler): - pass - - -class DrizzleTypeCompiler(mysql_dialect.MySQLTypeCompiler): - def _extend_numeric(self, type_, spec): - return spec - - def _extend_string(self, type_, defaults, spec): - """Extend a string-type declaration with standard SQL - COLLATE annotations and Drizzle specific extensions. 
- - """ - - def attr(name): - return getattr(type_, name, defaults.get(name)) - - if attr('collation'): - collation = 'COLLATE %s' % type_.collation - elif attr('binary'): - collation = 'BINARY' - else: - collation = None - - return ' '.join([c for c in (spec, collation) - if c is not None]) - - def visit_NCHAR(self, type): - raise NotImplementedError("Drizzle does not support NCHAR") - - def visit_NVARCHAR(self, type): - raise NotImplementedError("Drizzle does not support NVARCHAR") - - def visit_FLOAT(self, type_): - if type_.scale is not None and type_.precision is not None: - return "FLOAT(%s, %s)" % (type_.precision, type_.scale) - else: - return "FLOAT" - - def visit_BOOLEAN(self, type_): - return "BOOLEAN" - - def visit_BLOB(self, type_): - return "BLOB" - - -class DrizzleExecutionContext(mysql_dialect.MySQLExecutionContext): - pass - - -class DrizzleIdentifierPreparer(mysql_dialect.MySQLIdentifierPreparer): - pass - - -@log.class_logger -class DrizzleDialect(mysql_dialect.MySQLDialect): - """Details of the Drizzle dialect. - - Not used directly in application code. - """ - - name = 'drizzle' - - _supports_cast = True - supports_sequences = False - supports_native_boolean = True - supports_views = False - - default_paramstyle = 'format' - colspecs = colspecs - - statement_compiler = DrizzleCompiler - ddl_compiler = DrizzleDDLCompiler - type_compiler = DrizzleTypeCompiler - ischema_names = ischema_names - preparer = DrizzleIdentifierPreparer - - def on_connect(self): - """Force autocommit - Drizzle Bug#707842 doesn't set this properly""" - - def connect(conn): - conn.autocommit(False) - return connect - - @reflection.cache - def get_table_names(self, connection, schema=None, **kw): - """Return a Unicode SHOW TABLES from a given schema.""" - - if schema is not None: - current_schema = schema - else: - current_schema = self.default_schema_name - - charset = 'utf8' - rp = connection.execute("SHOW TABLES FROM %s" % - self.identifier_preparer.quote_identifier(current_schema)) - return [row[0] for row in self._compat_fetchall(rp, charset=charset)] - - @reflection.cache - def get_view_names(self, connection, schema=None, **kw): - raise NotImplementedError - - def _detect_casing(self, connection): - """Sniff out identifier case sensitivity. - - Cached per-connection. This value can not change without a server - restart. - """ - - return 0 - - def _detect_collations(self, connection): - """Pull the active COLLATIONS list from the server. - - Cached per-connection. - """ - - collations = {} - charset = self._connection_charset - rs = connection.execute( - 'SELECT CHARACTER_SET_NAME, COLLATION_NAME FROM' - ' data_dictionary.COLLATIONS') - for row in self._compat_fetchall(rs, charset): - collations[row[0]] = row[1] - return collations - - def _detect_ansiquotes(self, connection): - """Detect and adjust for the ANSI_QUOTES sql mode.""" - - self._server_ansiquotes = False - self._backslash_escapes = False - - diff --git a/lib/sqlalchemy/dialects/drizzle/mysqldb.py b/lib/sqlalchemy/dialects/drizzle/mysqldb.py deleted file mode 100644 index 7d91cc368b..0000000000 --- a/lib/sqlalchemy/dialects/drizzle/mysqldb.py +++ /dev/null @@ -1,48 +0,0 @@ -""" -.. 
dialect:: drizzle+mysqldb - :name: MySQL-Python - :dbapi: mysqldb - :connectstring: drizzle+mysqldb://:@[:]/ - :url: http://sourceforge.net/projects/mysql-python - - -""" - -from sqlalchemy.dialects.drizzle.base import ( - DrizzleDialect, - DrizzleExecutionContext, - DrizzleCompiler, - DrizzleIdentifierPreparer) -from sqlalchemy.connectors.mysqldb import ( - MySQLDBExecutionContext, - MySQLDBCompiler, - MySQLDBIdentifierPreparer, - MySQLDBConnector) - - -class DrizzleExecutionContext_mysqldb(MySQLDBExecutionContext, - DrizzleExecutionContext): - pass - - -class DrizzleCompiler_mysqldb(MySQLDBCompiler, DrizzleCompiler): - pass - - -class DrizzleIdentifierPreparer_mysqldb(MySQLDBIdentifierPreparer, - DrizzleIdentifierPreparer): - pass - - -class DrizzleDialect_mysqldb(MySQLDBConnector, DrizzleDialect): - execution_ctx_cls = DrizzleExecutionContext_mysqldb - statement_compiler = DrizzleCompiler_mysqldb - preparer = DrizzleIdentifierPreparer_mysqldb - - def _detect_charset(self, connection): - """Sniff out the character set in use for connection results.""" - - return 'utf8' - - -dialect = DrizzleDialect_mysqldb diff --git a/lib/sqlalchemy/dialects/firebird/__init__.py b/lib/sqlalchemy/dialects/firebird/__init__.py index 094ac3e832..f27bdc05bd 100644 --- a/lib/sqlalchemy/dialects/firebird/__init__.py +++ b/lib/sqlalchemy/dialects/firebird/__init__.py @@ -1,5 +1,6 @@ # firebird/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py index 21db57b68d..4dbf382335 100644 --- a/lib/sqlalchemy/dialects/firebird/base.py +++ b/lib/sqlalchemy/dialects/firebird/base.py @@ -1,5 +1,6 @@ # firebird/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -77,7 +78,6 @@ from sqlalchemy.engine import base, default, reflection from sqlalchemy.sql import compiler - from sqlalchemy.types import (BIGINT, BLOB, DATE, FLOAT, INTEGER, NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, Integer) @@ -119,7 +119,7 @@ "union", "unique", "update", "upper", "user", "using", "value", "values", "varchar", "variable", "varying", "view", "wait", "when", "where", "while", "with", "work", "write", "year", - ]) +]) class _StringType(sqltypes.String): @@ -160,55 +160,55 @@ def process(value): } ischema_names = { - 'SHORT': SMALLINT, - 'LONG': INTEGER, - 'QUAD': FLOAT, - 'FLOAT': FLOAT, - 'DATE': DATE, - 'TIME': TIME, - 'TEXT': TEXT, - 'INT64': BIGINT, - 'DOUBLE': FLOAT, - 'TIMESTAMP': TIMESTAMP, + 'SHORT': SMALLINT, + 'LONG': INTEGER, + 'QUAD': FLOAT, + 'FLOAT': FLOAT, + 'DATE': DATE, + 'TIME': TIME, + 'TEXT': TEXT, + 'INT64': BIGINT, + 'DOUBLE': FLOAT, + 'TIMESTAMP': TIMESTAMP, 'VARYING': VARCHAR, 'CSTRING': CHAR, - 'BLOB': BLOB, - } + 'BLOB': BLOB, +} # TODO: date conversion types (should be implemented as _FBDateTime, # _FBDate, etc. 
as bind/result functionality is required) class FBTypeCompiler(compiler.GenericTypeCompiler): - def visit_boolean(self, type_): - return self.visit_SMALLINT(type_) + def visit_boolean(self, type_, **kw): + return self.visit_SMALLINT(type_, **kw) - def visit_datetime(self, type_): - return self.visit_TIMESTAMP(type_) + def visit_datetime(self, type_, **kw): + return self.visit_TIMESTAMP(type_, **kw) - def visit_TEXT(self, type_): + def visit_TEXT(self, type_, **kw): return "BLOB SUB_TYPE 1" - def visit_BLOB(self, type_): + def visit_BLOB(self, type_, **kw): return "BLOB SUB_TYPE 0" def _extend_string(self, type_, basic): - charset = getattr(type_, 'charset', None) + charset = getattr(type_, 'charset', None) if charset is None: return basic else: return '%s CHARACTER SET %s' % (basic, charset) - def visit_CHAR(self, type_): - basic = super(FBTypeCompiler, self).visit_CHAR(type_) + def visit_CHAR(self, type_, **kw): + basic = super(FBTypeCompiler, self).visit_CHAR(type_, **kw) return self._extend_string(type_, basic) - def visit_VARCHAR(self, type_): + def visit_VARCHAR(self, type_, **kw): if not type_.length: raise exc.CompileError( - "VARCHAR requires a length on dialect %s" % - self.dialect.name) - basic = super(FBTypeCompiler, self).visit_VARCHAR(type_) + "VARCHAR requires a length on dialect %s" % + self.dialect.name) + basic = super(FBTypeCompiler, self).visit_VARCHAR(type_, **kw) return self._extend_string(type_, basic) @@ -217,46 +217,46 @@ class FBCompiler(sql.compiler.SQLCompiler): ansi_bind_rules = True - #def visit_contains_op_binary(self, binary, operator, **kw): - # cant use CONTAINING b.c. it's case insensitive. + # def visit_contains_op_binary(self, binary, operator, **kw): + # cant use CONTAINING b.c. it's case insensitive. - #def visit_notcontains_op_binary(self, binary, operator, **kw): - # cant use NOT CONTAINING b.c. it's case insensitive. + # def visit_notcontains_op_binary(self, binary, operator, **kw): + # cant use NOT CONTAINING b.c. it's case insensitive. 
def visit_now_func(self, fn, **kw): return "CURRENT_TIMESTAMP" def visit_startswith_op_binary(self, binary, operator, **kw): return '%s STARTING WITH %s' % ( - binary.left._compiler_dispatch(self, **kw), - binary.right._compiler_dispatch(self, **kw)) + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) def visit_notstartswith_op_binary(self, binary, operator, **kw): return '%s NOT STARTING WITH %s' % ( - binary.left._compiler_dispatch(self, **kw), - binary.right._compiler_dispatch(self, **kw)) + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) def visit_mod_binary(self, binary, operator, **kw): return "mod(%s, %s)" % ( - self.process(binary.left, **kw), - self.process(binary.right, **kw)) + self.process(binary.left, **kw), + self.process(binary.right, **kw)) def visit_alias(self, alias, asfrom=False, **kwargs): if self.dialect._version_two: return super(FBCompiler, self).\ - visit_alias(alias, asfrom=asfrom, **kwargs) + visit_alias(alias, asfrom=asfrom, **kwargs) else: # Override to not use the AS keyword which FB 1.5 does not like if asfrom: alias_name = isinstance(alias.name, - expression._truncated_label) and \ - self._truncated_identifier("alias", - alias.name) or alias.name + expression._truncated_label) and \ + self._truncated_identifier("alias", + alias.name) or alias.name return self.process( - alias.original, asfrom=asfrom, **kwargs) + \ - " " + \ - self.preparer.format_alias(alias, alias_name) + alias.original, asfrom=asfrom, **kwargs) + \ + " " + \ + self.preparer.format_alias(alias, alias_name) else: return self.process(alias.original, **kwargs) @@ -293,31 +293,31 @@ def default_from(self): def visit_sequence(self, seq): return "gen_id(%s, 1)" % self.preparer.format_sequence(seq) - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): """Called when building a ``SELECT`` statement, position is just before column list Firebird puts the limit and offset right after the ``SELECT``... 
""" result = "" - if select._limit: - result += "FIRST %s " % self.process(sql.literal(select._limit)) - if select._offset: - result += "SKIP %s " % self.process(sql.literal(select._offset)) + if select._limit_clause is not None: + result += "FIRST %s " % self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + result += "SKIP %s " % self.process(select._offset_clause, **kw) if select._distinct: result += "DISTINCT " return result - def limit_clause(self, select): + def limit_clause(self, select, **kw): """Already taken care of in the `get_select_precolumns` method.""" return "" def returning_clause(self, stmt, returning_cols): columns = [ - self._label_select_column(None, c, True, False, {}) - for c in expression._select_iterables(returning_cols) - ] + self._label_select_column(None, c, True, False, {}) + for c in expression._select_iterables(returning_cols) + ] return 'RETURNING ' + ', '.join(columns) @@ -332,34 +332,35 @@ def visit_create_sequence(self, create): # http://www.firebirdsql.org/manual/generatorguide-sqlsyntax.html if create.element.start is not None: raise NotImplemented( - "Firebird SEQUENCE doesn't support START WITH") + "Firebird SEQUENCE doesn't support START WITH") if create.element.increment is not None: raise NotImplemented( - "Firebird SEQUENCE doesn't support INCREMENT BY") + "Firebird SEQUENCE doesn't support INCREMENT BY") if self.dialect._version_two: return "CREATE SEQUENCE %s" % \ - self.preparer.format_sequence(create.element) + self.preparer.format_sequence(create.element) else: return "CREATE GENERATOR %s" % \ - self.preparer.format_sequence(create.element) + self.preparer.format_sequence(create.element) def visit_drop_sequence(self, drop): """Generate a ``DROP GENERATOR`` statement for the sequence.""" if self.dialect._version_two: return "DROP SEQUENCE %s" % \ - self.preparer.format_sequence(drop.element) + self.preparer.format_sequence(drop.element) else: return "DROP GENERATOR %s" % \ - self.preparer.format_sequence(drop.element) + self.preparer.format_sequence(drop.element) class FBIdentifierPreparer(sql.compiler.IdentifierPreparer): """Install Firebird specific reserved words.""" reserved_words = RESERVED_WORDS - illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union(['_']) + illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union( + ['_']) def __init__(self, dialect): super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True) @@ -370,10 +371,10 @@ def fire_sequence(self, seq, type_): """Get the next value from the sequence using ``gen_id()``.""" return self._execute_scalar( - "SELECT gen_id(%s, 1) FROM rdb$database" % - self.dialect.identifier_preparer.format_sequence(seq), - type_ - ) + "SELECT gen_id(%s, 1) FROM rdb$database" % + self.dialect.identifier_preparer.format_sequence(seq), + type_ + ) class FBDialect(default.DefaultDialect): @@ -411,12 +412,12 @@ class FBDialect(default.DefaultDialect): def initialize(self, connection): super(FBDialect, self).initialize(connection) - self._version_two = ('firebird' in self.server_version_info and \ - self.server_version_info >= (2, ) - ) or \ - ('interbase' in self.server_version_info and \ + self._version_two = ('firebird' in self.server_version_info and + self.server_version_info >= (2, ) + ) or \ + ('interbase' in self.server_version_info and self.server_version_info >= (6, ) - ) + ) if not self._version_two: # TODO: whatever other pre < 2.0 stuff goes here @@ -426,8 +427,8 @@ def initialize(self, connection): sqltypes.DateTime: 
sqltypes.DATE } - self.implicit_returning = self._version_two and \ - self.__dict__.get('implicit_returning', True) + self.implicit_returning = self._version_two and \ + self.__dict__.get('implicit_returning', True) def normalize_name(self, name): # Remove trailing spaces: FB uses a CHAR() type, @@ -436,7 +437,7 @@ def normalize_name(self, name): if name is None: return None elif name.upper() == name and \ - not self.identifier_preparer._requires_quotes(name.lower()): + not self.identifier_preparer._requires_quotes(name.lower()): return name.lower() else: return name @@ -445,7 +446,7 @@ def denormalize_name(self, name): if name is None: return None elif name.lower() == name and \ - not self.identifier_preparer._requires_quotes(name.lower()): + not self.identifier_preparer._requires_quotes(name.lower()): return name.upper() else: return name @@ -539,8 +540,8 @@ def get_pk_constraint(self, connection, table_name, schema=None, **kw): @reflection.cache def get_column_sequence(self, connection, - table_name, column_name, - schema=None, **kw): + table_name, column_name, + schema=None, **kw): tablename = self.denormalize_name(table_name) colname = self.denormalize_name(column_name) # Heuristic-query to determine the generator associated to a PK field @@ -613,8 +614,8 @@ def get_columns(self, connection, table_name, schema=None, **kw): coltype = sqltypes.NULLTYPE elif issubclass(coltype, Integer) and row['fprec'] != 0: coltype = NUMERIC( - precision=row['fprec'], - scale=row['fscale'] * -1) + precision=row['fprec'], + scale=row['fscale'] * -1) elif colspec in ('VARYING', 'CSTRING'): coltype = coltype(row['flen']) elif colspec == 'TEXT': @@ -636,8 +637,8 @@ def get_columns(self, connection, table_name, schema=None, **kw): # (see also http://tracker.firebirdsql.org/browse/CORE-356) defexpr = row['fdefault'].lstrip() assert defexpr[:8].rstrip().upper() == \ - 'DEFAULT', "Unrecognized default value: %s" % \ - defexpr + 'DEFAULT', "Unrecognized default value: %s" % \ + defexpr defvalue = defexpr[8:].strip() if defvalue == 'NULL': # Redundant @@ -700,9 +701,9 @@ def get_foreign_keys(self, connection, table_name, schema=None, **kw): fk['name'] = cname fk['referred_table'] = self.normalize_name(row['targetrname']) fk['constrained_columns'].append( - self.normalize_name(row['fname'])) + self.normalize_name(row['fname'])) fk['referred_columns'].append( - self.normalize_name(row['targetfname'])) + self.normalize_name(row['targetfname'])) return list(fks.values()) @reflection.cache @@ -732,7 +733,6 @@ def get_indexes(self, connection, table_name, schema=None, **kw): indexrec['unique'] = bool(row['unique_flag']) indexrec['column_names'].append( - self.normalize_name(row['field_name'])) + self.normalize_name(row['field_name'])) return list(indexes.values()) - diff --git a/lib/sqlalchemy/dialects/firebird/fdb.py b/lib/sqlalchemy/dialects/firebird/fdb.py index 4d94ef0d5d..aff8cff15f 100644 --- a/lib/sqlalchemy/dialects/firebird/fdb.py +++ b/lib/sqlalchemy/dialects/firebird/fdb.py @@ -1,5 +1,6 @@ # firebird/fdb.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,7 +9,8 @@ .. dialect:: firebird+fdb :name: fdb :dbapi: pyodbc - :connectstring: firebird+fdb://user:password@host:port/path/to/db[?key=value&key=value...] 
+ :connectstring: firebird+fdb://user:password@host:port/path/to/db\ +[?key=value&key=value...] :url: http://pypi.python.org/pypi/fdb/ fdb is a kinterbasdb compatible DBAPI for Firebird. @@ -22,8 +24,9 @@ Arguments ---------- -The ``fdb`` dialect is based on the :mod:`sqlalchemy.dialects.firebird.kinterbasdb` -dialect, however does not accept every argument that Kinterbasdb does. +The ``fdb`` dialect is based on the +:mod:`sqlalchemy.dialects.firebird.kinterbasdb` dialect, however does not +accept every argument that Kinterbasdb does. * ``enable_rowcount`` - True by default, setting this to False disables the usage of "cursor.rowcount" with the @@ -60,8 +63,8 @@ .. seealso:: - http://pythonhosted.org/fdb/usage-guide.html#retaining-transactions - information - on the "retaining" flag. + http://pythonhosted.org/fdb/usage-guide.html#retaining-transactions + - information on the "retaining" flag. """ @@ -72,14 +75,14 @@ class FBDialect_fdb(FBDialect_kinterbasdb): def __init__(self, enable_rowcount=True, - retaining=False, **kwargs): + retaining=False, **kwargs): super(FBDialect_fdb, self).__init__( - enable_rowcount=enable_rowcount, - retaining=retaining, **kwargs) + enable_rowcount=enable_rowcount, + retaining=retaining, **kwargs) @classmethod def dbapi(cls): - return __import__('fdb') + return __import__('fdb') def create_connect_args(self, url): opts = url.translate_connect_args(username='user') diff --git a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py index b8a83a07be..3df9f736b0 100644 --- a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py +++ b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py @@ -1,5 +1,6 @@ # firebird/kinterbasdb.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,15 +9,16 @@ .. dialect:: firebird+kinterbasdb :name: kinterbasdb :dbapi: kinterbasdb - :connectstring: firebird+kinterbasdb://user:password@host:port/path/to/db[?key=value&key=value...] + :connectstring: firebird+kinterbasdb://user:password@host:port/path/to/db\ +[?key=value&key=value...] :url: http://firebirdsql.org/index.php?op=devel&sub=python Arguments ---------- The Kinterbasdb backend accepts the ``enable_rowcount`` and ``retaining`` -arguments accepted by the :mod:`sqlalchemy.dialects.firebird.fdb` dialect. In addition, it -also accepts the following: +arguments accepted by the :mod:`sqlalchemy.dialects.firebird.fdb` dialect. +In addition, it also accepts the following: * ``type_conv`` - select the kind of mapping done on the types: by default SQLAlchemy uses 200 with Unicode, datetime and decimal support. See @@ -24,7 +26,7 @@ * ``concurrency_level`` - set the backend policy with regards to threading issues: by default SQLAlchemy uses policy 1. See the linked documents - below for futher information. + below for further information. .. 
seealso:: @@ -51,9 +53,11 @@ def process(value): return value return process + class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, sqltypes.Numeric): pass + class _FBFloat_kinterbasdb(_kinterbasdb_numeric, sqltypes.Float): pass @@ -62,7 +66,7 @@ class FBExecutionContext_kinterbasdb(FBExecutionContext): @property def rowcount(self): if self.execution_options.get('enable_rowcount', - self.dialect.enable_rowcount): + self.dialect.enable_rowcount): return self.cursor.rowcount else: return -1 @@ -86,8 +90,8 @@ class FBDialect_kinterbasdb(FBDialect): ) def __init__(self, type_conv=200, concurrency_level=1, - enable_rowcount=True, - retaining=False, **kwargs): + enable_rowcount=True, + retaining=False, **kwargs): super(FBDialect_kinterbasdb, self).__init__(**kwargs) self.enable_rowcount = enable_rowcount self.type_conv = type_conv @@ -122,7 +126,7 @@ def create_connect_args(self, url): type_conv = opts.pop('type_conv', self.type_conv) concurrency_level = opts.pop('concurrency_level', - self.concurrency_level) + self.concurrency_level) if self.dbapi is not None: initialized = getattr(self.dbapi, 'initialized', None) @@ -133,7 +137,7 @@ def create_connect_args(self, url): initialized = getattr(self.dbapi, '_initialized', False) if not initialized: self.dbapi.init(type_conv=type_conv, - concurrency_level=concurrency_level) + concurrency_level=concurrency_level) return ([], opts) def _get_server_version_info(self, connection): @@ -155,10 +159,11 @@ def _get_server_version_info(self, connection): return self._parse_version_info(version) def _parse_version_info(self, version): - m = match('\w+-V(\d+)\.(\d+)\.(\d+)\.(\d+)( \w+ (\d+)\.(\d+))?', version) + m = match( + '\w+-V(\d+)\.(\d+)\.(\d+)\.(\d+)( \w+ (\d+)\.(\d+))?', version) if not m: raise AssertionError( - "Could not determine version from string '%s'" % version) + "Could not determine version from string '%s'" % version) if m.group(5) != None: return tuple([int(x) for x in m.group(6, 7, 4)] + ['firebird']) @@ -167,7 +172,7 @@ def _parse_version_info(self, version): def is_disconnect(self, e, connection, cursor): if isinstance(e, (self.dbapi.OperationalError, - self.dbapi.ProgrammingError)): + self.dbapi.ProgrammingError)): msg = str(e) return ('Unable to complete network request to host' in msg or 'Invalid connection state' in msg or diff --git a/lib/sqlalchemy/dialects/mssql/__init__.py b/lib/sqlalchemy/dialects/mssql/__init__.py index 7a2dfa60bd..8c9e85862f 100644 --- a/lib/sqlalchemy/dialects/mssql/__init__.py +++ b/lib/sqlalchemy/dialects/mssql/__init__.py @@ -1,11 +1,12 @@ # mssql/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from sqlalchemy.dialects.mssql import base, pyodbc, adodbapi, \ - pymssql, zxjdbc, mxodbc + pymssql, zxjdbc, mxodbc base.dialect = pyodbc.dialect diff --git a/lib/sqlalchemy/dialects/mssql/adodbapi.py b/lib/sqlalchemy/dialects/mssql/adodbapi.py index 95cf424239..60fa25d348 100644 --- a/lib/sqlalchemy/dialects/mssql/adodbapi.py +++ b/lib/sqlalchemy/dialects/mssql/adodbapi.py @@ -1,5 +1,6 @@ # mssql/adodbapi.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -60,7 +61,7 @@ 
def create_connect_args(self, url): connectors = ["Provider=SQLOLEDB"] if 'port' in keys: connectors.append("Data Source=%s, %s" % - (keys.get("host"), keys.get("port"))) + (keys.get("host"), keys.get("port"))) else: connectors.append("Data Source=%s" % keys.get("host")) connectors.append("Initial Catalog=%s" % keys.get("database")) @@ -74,6 +75,6 @@ def create_connect_args(self, url): def is_disconnect(self, e, connection, cursor): return isinstance(e, self.dbapi.adodbapi.DatabaseError) and \ - "'connection failure'" in str(e) + "'connection failure'" in str(e) dialect = MSDialect_adodbapi diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 522cb5ce36..927dceff74 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -1,5 +1,6 @@ # mssql/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,18 +13,70 @@ Auto Increment Behavior ----------------------- -``IDENTITY`` columns are supported by using SQLAlchemy -``schema.Sequence()`` objects. In other words:: +SQL Server provides so-called "auto incrementing" behavior using the +``IDENTITY`` construct, which can be placed on an integer primary key. +SQLAlchemy considers ``IDENTITY`` within its default "autoincrement" behavior, +described at :paramref:`.Column.autoincrement`; this means +that by default, the first integer primary key column in a :class:`.Table` +will be considered to be the identity column and will generate DDL as such:: + + from sqlalchemy import Table, MetaData, Column, Integer + + m = MetaData() + t = Table('t', m, + Column('id', Integer, primary_key=True), + Column('x', Integer)) + m.create_all(engine) + +The above example will generate DDL as: + +.. sourcecode:: sql + + CREATE TABLE t ( + id INTEGER NOT NULL IDENTITY(1,1), + x INTEGER NULL, + PRIMARY KEY (id) + ) + +For the case where this default generation of ``IDENTITY`` is not desired, +specify ``autoincrement=False`` on all integer primary key columns:: + + m = MetaData() + t = Table('t', m, + Column('id', Integer, primary_key=True, autoincrement=False), + Column('x', Integer)) + m.create_all(engine) + +.. note:: + + An INSERT statement which refers to an explicit value for such + a column is prohibited by SQL Server, however SQLAlchemy will detect this + and modify the ``IDENTITY_INSERT`` flag accordingly at statement execution + time. As this is not a high performing process, care should be taken to + set the ``autoincrement`` flag appropriately for columns that will not + actually require IDENTITY behavior. + +Controlling "Start" and "Increment" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Specific control over the parameters of the ``IDENTITY`` value is supported +using the :class:`.schema.Sequence` object. While this object normally +represents an explicit "sequence" for supporting backends, on SQL Server it is +re-purposed to specify behavior regarding the identity column, including +support of the "start" and "increment" values:: from sqlalchemy import Table, Integer, Sequence, Column Table('test', metadata, Column('id', Integer, - Sequence('blah',100,10), primary_key=True), + Sequence('blah', start=100, increment=10), + primary_key=True), Column('name', String(20)) ).create(some_engine) -would yield:: +would yield: + +.. 
sourcecode:: sql CREATE TABLE test ( id INTEGER NOT NULL IDENTITY(100,10) PRIMARY KEY, @@ -33,14 +86,135 @@ Note that the ``start`` and ``increment`` values for sequences are optional and will default to 1,1. -Implicit ``autoincrement`` behavior works the same in MSSQL as it -does in other dialects and results in an ``IDENTITY`` column. +INSERT behavior +^^^^^^^^^^^^^^^^ + +Handling of the ``IDENTITY`` column at INSERT time involves two key +techniques. The most common is being able to fetch the "last inserted value" +for a given ``IDENTITY`` column, a process which SQLAlchemy performs +implicitly in many cases, most importantly within the ORM. + +The process for fetching this value has several variants: + +* In the vast majority of cases, RETURNING is used in conjunction with INSERT + statements on SQL Server in order to get newly generated primary key values: + + .. sourcecode:: sql + + INSERT INTO t (x) OUTPUT inserted.id VALUES (?) + +* When RETURNING is not available or has been disabled via + ``implicit_returning=False``, either the ``scope_identity()`` function or + the ``@@identity`` variable is used; behavior varies by backend: + + * when using PyODBC, the phrase ``; select scope_identity()`` will be + appended to the end of the INSERT statement; a second result set will be + fetched in order to receive the value. Given a table as:: + + t = Table('t', m, Column('id', Integer, primary_key=True), + Column('x', Integer), + implicit_returning=False) + + an INSERT will look like: + + .. sourcecode:: sql + + INSERT INTO t (x) VALUES (?); select scope_identity() + + * Other dialects such as pymssql will call upon + ``SELECT scope_identity() AS lastrowid`` subsequent to an INSERT + statement. If the flag ``use_scope_identity=False`` is passed to + :func:`.create_engine`, the statement ``SELECT @@identity AS lastrowid`` + is used instead. + +A table that contains an ``IDENTITY`` column will prohibit an INSERT statement +that refers to the identity column explicitly. The SQLAlchemy dialect will +detect when an INSERT construct, created using a core :func:`.insert` +construct (not a plain string SQL), refers to the identity column, and +in this case will emit ``SET IDENTITY_INSERT ON`` prior to the insert +statement proceeding, and ``SET IDENTITY_INSERT OFF`` subsequent to the +execution. Given this example:: + + m = MetaData() + t = Table('t', m, Column('id', Integer, primary_key=True), + Column('x', Integer)) + m.create_all(engine) + + engine.execute(t.insert(), {'id': 1, 'x':1}, {'id':2, 'x':2}) + +The above column will be created with IDENTITY, however the INSERT statement +we emit is specifying explicit values. In the echo output we can see +how SQLAlchemy handles this: + +.. sourcecode:: sql + + CREATE TABLE t ( + id INTEGER NOT NULL IDENTITY(1,1), + x INTEGER NULL, + PRIMARY KEY (id) + ) + + COMMIT + SET IDENTITY_INSERT t ON + INSERT INTO t (id, x) VALUES (?, ?) + ((1, 1), (2, 2)) + SET IDENTITY_INSERT t OFF + COMMIT + + + +This +is an auxilliary use case suitable for testing and bulk insert scenarios. + +.. 
_legacy_schema_rendering: + +Rendering of SQL statements that include schema qualifiers +--------------------------------------------------------- + +When using :class:`.Table` metadata that includes a "schema" qualifier, +such as:: + + account_table = Table( + 'account', metadata, + Column('id', Integer, primary_key=True), + Column('info', String(100)), + schema="customer_schema" + ) + +The SQL Server dialect has a long-standing behavior that it will attempt +to turn a schema-qualified table name into an alias, such as:: + + >>> eng = create_engine("mssql+pymssql://mydsn") + >>> print(account_table.select().compile(eng)) + SELECT account_1.id, account_1.info + FROM customer_schema.account AS account_1 + +This behavior is legacy, does not function correctly for many forms +of SQL statements, and will be disabled by default in the 1.1 series +of SQLAlchemy. As of 1.0.5, the above statement will produce the following +warning:: + + SAWarning: legacy_schema_aliasing flag is defaulted to True; + some schema-qualified queries may not function correctly. + Consider setting this flag to False for modern SQL Server versions; + this flag will default to False in version 1.1 -* Support for ``SET IDENTITY_INSERT ON`` mode (automagic on / off for - ``INSERT`` s) +This warning encourages the :class:`.Engine` to be created as follows:: -* Support for auto-fetching of ``@@IDENTITY/@@SCOPE_IDENTITY()`` on - ``INSERT`` + >>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=False) + +Where the above SELECT statement will produce:: + + >>> print(account_table.select().compile(eng)) + SELECT customer_schema.account.id, customer_schema.account.info + FROM customer_schema.account + +The warning will not emit if the ``legacy_schema_aliasing`` flag is set +to either True or False. + +.. versionadded:: 1.0.5 - Added the ``legacy_schema_aliasing`` flag to disable + the SQL Server dialect's legacy behavior with schema-qualified table + names. This flag will default to False in version 1.1. Collation Support ----------------- @@ -62,7 +236,7 @@ LIMIT/OFFSET Support -------------------- -MSSQL has no support for the LIMIT or OFFSET keysowrds. LIMIT is +MSSQL has no support for the LIMIT or OFFSET keywords. LIMIT is supported directly through the ``TOP`` Transact SQL keyword:: select.limit @@ -90,7 +264,7 @@ name VARCHAR(20) If ``nullable`` is ``True`` or ``False`` then the column will be -``NULL` or ``NOT NULL`` respectively. +``NULL`` or ``NOT NULL`` respectively. Date / Time Handling -------------------- @@ -101,6 +275,53 @@ previous - if a server version below 2008 is detected, DDL for these types will be issued as DATETIME. +.. _mssql_large_type_deprecation: + +Large Text/Binary Type Deprecation +---------------------------------- + +Per `SQL Server 2012/2014 Documentation `_, +the ``NTEXT``, ``TEXT`` and ``IMAGE`` datatypes are to be removed from SQL Server +in a future release. SQLAlchemy normally relates these types to the +:class:`.UnicodeText`, :class:`.Text` and :class:`.LargeBinary` datatypes. + +In order to accommodate this change, a new flag ``deprecate_large_types`` +is added to the dialect, which will be automatically set based on detection +of the server version in use, if not otherwise set by the user. The +behavior of this flag is as follows: + +* When this flag is ``True``, the :class:`.UnicodeText`, :class:`.Text` and + :class:`.LargeBinary` datatypes, when used to render DDL, will render the + types ``NVARCHAR(max)``, ``VARCHAR(max)``, and ``VARBINARY(max)``, + respectively. 
This is a new behavior as of the addition of this flag. + +* When this flag is ``False``, the :class:`.UnicodeText`, :class:`.Text` and + :class:`.LargeBinary` datatypes, when used to render DDL, will render the + types ``NTEXT``, ``TEXT``, and ``IMAGE``, + respectively. This is the long-standing behavior of these types. + +* The flag begins with the value ``None``, before a database connection is + established. If the dialect is used to render DDL without the flag being + set, it is interpreted the same as ``False``. + +* On first connection, the dialect detects if SQL Server version 2012 or greater + is in use; if the flag is still at ``None``, it sets it to ``True`` or + ``False`` based on whether 2012 or greater is detected. + +* The flag can be set to either ``True`` or ``False`` when the dialect + is created, typically via :func:`.create_engine`:: + + eng = create_engine("mssql+pymssql://user:pass@host/db", + deprecate_large_types=True) + +* Complete control over whether the "old" or "new" types are rendered is + available in all SQLAlchemy versions by using the UPPERCASE type objects + instead: :class:`.NVARCHAR`, :class:`.VARCHAR`, :class:`.types.VARBINARY`, + :class:`.TEXT`, :class:`.mssql.NTEXT`, :class:`.mssql.IMAGE` will always remain + fixed and always output exactly that type. + +.. versionadded:: 1.0.0 + .. _mssql_indexes: Clustered Index Support @@ -127,7 +348,8 @@ which will render the table, for example, as:: - CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL, PRIMARY KEY CLUSTERED (x, y)) + CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL, + PRIMARY KEY CLUSTERED (x, y)) Similarly, we can generate a clustered unique constraint using:: @@ -149,7 +371,8 @@ INCLUDE ^^^^^^^ -The ``mssql_include`` option renders INCLUDE(colname) for the given string names:: +The ``mssql_include`` option renders INCLUDE(colname) for the given string +names:: Index("my_index", table.c.x, mssql_include=['y']) @@ -210,6 +433,40 @@ class MyClass(Base): This option can also be specified engine-wide using the ``implicit_returning=False`` argument on :func:`.create_engine`. +.. _mssql_rowcount_versioning: + +Rowcount Support / ORM Versioning +--------------------------------- + +The SQL Server drivers have very limited ability to return the number +of rows updated from an UPDATE or DELETE statement. In particular, the +pymssql driver has no support, whereas the pyodbc driver can only return +this value under certain conditions. + +In particular, updated rowcount is not available when OUTPUT INSERTED +is used. This impacts the SQLAlchemy ORM's versioning feature when +server-side versioning schemes are used. When +using pyodbc, the "implicit_returning" flag needs to be set to false +for any ORM mapped class that uses a version_id column in conjunction with +a server-side version generator:: + + class MyTable(Base): + __tablename__ = 'mytable' + id = Column(Integer, primary_key=True) + stuff = Column(String(10)) + timestamp = Column(TIMESTAMP(), default=text('DEFAULT')) + __mapper_args__ = { + 'version_id_col': timestamp, + 'version_id_generator': False, + } + __table_args__ = { + 'implicit_returning': False + } + +Without the implicit_returning flag above, the UPDATE statement will +use ``OUTPUT inserted.timestamp`` and the rowcount will be returned as +-1, causing the versioning logic to fail. + Enabling Snapshot Isolation --------------------------- @@ -240,19 +497,20 @@ class MyClass(Base): import re from ... 
import sql, schema as sa_schema, exc, util -from ...sql import compiler, expression, \ - util as sql_util, cast +from ...sql import compiler, expression, util as sql_util from ... import engine from ...engine import reflection, default from ... import types as sqltypes from ...types import INTEGER, BIGINT, SMALLINT, DECIMAL, NUMERIC, \ - FLOAT, TIMESTAMP, DATETIME, DATE, BINARY,\ - VARBINARY, TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR + FLOAT, TIMESTAMP, DATETIME, DATE, BINARY,\ + TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR from ...util import update_wrapper from . import information_schema as ischema +# http://sqlserverbuilds.blogspot.com/ +MS_2012_VERSION = (11,) MS_2008_VERSION = (10,) MS_2005_VERSION = (9,) MS_2000_VERSION = (8,) @@ -286,7 +544,7 @@ class MyClass(Base): 'unique', 'unpivot', 'update', 'updatetext', 'use', 'user', 'values', 'varying', 'view', 'waitfor', 'when', 'where', 'while', 'with', 'writetext', - ]) + ]) class REAL(sqltypes.REAL): @@ -308,6 +566,7 @@ class TINYINT(sqltypes.Integer): # not sure about other dialects). class _MSDate(sqltypes.Date): + def bind_processor(self, dialect): def process(value): if type(value) == datetime.date: @@ -323,16 +582,21 @@ def process(value): if isinstance(value, datetime.datetime): return value.date() elif isinstance(value, util.string_types): + m = self._reg.match(value) + if not m: + raise ValueError( + "could not parse %r as a date value" % (value, )) return datetime.date(*[ - int(x or 0) - for x in self._reg.match(value).groups() - ]) + int(x or 0) + for x in m.groups() + ]) else: return value return process class TIME(sqltypes.TIME): + def __init__(self, precision=None, **kwargs): self.precision = precision super(TIME, self).__init__() @@ -343,7 +607,7 @@ def bind_processor(self, dialect): def process(value): if isinstance(value, datetime.datetime): value = datetime.datetime.combine( - self.__zero_date, value.time()) + self.__zero_date, value.time()) elif isinstance(value, datetime.time): value = datetime.datetime.combine(self.__zero_date, value) return value @@ -356,9 +620,13 @@ def process(value): if isinstance(value, datetime.datetime): return value.time() elif isinstance(value, util.string_types): + m = self._reg.match(value) + if not m: + raise ValueError( + "could not parse %r as a time value" % (value, )) return datetime.time(*[ - int(x or 0) - for x in self._reg.match(value).groups()]) + int(x or 0) + for x in m.groups()]) else: return value return process @@ -366,6 +634,7 @@ def process(value): class _DateTimeBase(object): + def bind_processor(self, dialect): def process(value): if type(value) == datetime.date: @@ -400,21 +669,40 @@ def __init__(self, precision=None, **kwargs): class _StringType(object): + """Base for MSSQL string types.""" def __init__(self, collation=None): super(_StringType, self).__init__(collation=collation) - - class NTEXT(sqltypes.UnicodeText): + """MSSQL NTEXT type, for variable-length unicode text up to 2^30 characters.""" __visit_name__ = 'NTEXT' +class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary): + """The MSSQL VARBINARY type. + + This type extends both :class:`.types.VARBINARY` and + :class:`.types.LargeBinary`. In "deprecate_large_types" mode, + the :class:`.types.LargeBinary` type will produce ``VARBINARY(max)`` + on SQL Server. + + .. versionadded:: 1.0.0 + + .. 
seealso:: + + :ref:`mssql_large_type_deprecation` + + + + """ + __visit_name__ = 'VARBINARY' + class IMAGE(sqltypes.LargeBinary): __visit_name__ = 'IMAGE' @@ -515,105 +803,117 @@ def _extend(self, spec, type_, length=None): spec = spec + "(%s)" % length return ' '.join([c for c in (spec, collation) - if c is not None]) + if c is not None]) - def visit_FLOAT(self, type_): + def visit_FLOAT(self, type_, **kw): precision = getattr(type_, 'precision', None) if precision is None: return "FLOAT" else: return "FLOAT(%(precision)s)" % {'precision': precision} - def visit_TINYINT(self, type_): + def visit_TINYINT(self, type_, **kw): return "TINYINT" - def visit_DATETIMEOFFSET(self, type_): - if type_.precision: + def visit_DATETIMEOFFSET(self, type_, **kw): + if type_.precision is not None: return "DATETIMEOFFSET(%s)" % type_.precision else: return "DATETIMEOFFSET" - def visit_TIME(self, type_): + def visit_TIME(self, type_, **kw): precision = getattr(type_, 'precision', None) - if precision: + if precision is not None: return "TIME(%s)" % precision else: return "TIME" - def visit_DATETIME2(self, type_): + def visit_DATETIME2(self, type_, **kw): precision = getattr(type_, 'precision', None) - if precision: + if precision is not None: return "DATETIME2(%s)" % precision else: return "DATETIME2" - def visit_SMALLDATETIME(self, type_): + def visit_SMALLDATETIME(self, type_, **kw): return "SMALLDATETIME" - def visit_unicode(self, type_): - return self.visit_NVARCHAR(type_) + def visit_unicode(self, type_, **kw): + return self.visit_NVARCHAR(type_, **kw) - def visit_unicode_text(self, type_): - return self.visit_NTEXT(type_) + def visit_text(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_VARCHAR(type_, **kw) + else: + return self.visit_TEXT(type_, **kw) - def visit_NTEXT(self, type_): + def visit_unicode_text(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_NVARCHAR(type_, **kw) + else: + return self.visit_NTEXT(type_, **kw) + + def visit_NTEXT(self, type_, **kw): return self._extend("NTEXT", type_) - def visit_TEXT(self, type_): + def visit_TEXT(self, type_, **kw): return self._extend("TEXT", type_) - def visit_VARCHAR(self, type_): + def visit_VARCHAR(self, type_, **kw): return self._extend("VARCHAR", type_, length=type_.length or 'max') - def visit_CHAR(self, type_): + def visit_CHAR(self, type_, **kw): return self._extend("CHAR", type_) - def visit_NCHAR(self, type_): + def visit_NCHAR(self, type_, **kw): return self._extend("NCHAR", type_) - def visit_NVARCHAR(self, type_): + def visit_NVARCHAR(self, type_, **kw): return self._extend("NVARCHAR", type_, length=type_.length or 'max') - def visit_date(self, type_): + def visit_date(self, type_, **kw): if self.dialect.server_version_info < MS_2008_VERSION: - return self.visit_DATETIME(type_) + return self.visit_DATETIME(type_, **kw) else: - return self.visit_DATE(type_) + return self.visit_DATE(type_, **kw) - def visit_time(self, type_): + def visit_time(self, type_, **kw): if self.dialect.server_version_info < MS_2008_VERSION: - return self.visit_DATETIME(type_) + return self.visit_DATETIME(type_, **kw) else: - return self.visit_TIME(type_) + return self.visit_TIME(type_, **kw) - def visit_large_binary(self, type_): - return self.visit_IMAGE(type_) + def visit_large_binary(self, type_, **kw): + if self.dialect.deprecate_large_types: + return self.visit_VARBINARY(type_, **kw) + else: + return self.visit_IMAGE(type_, **kw) - def visit_IMAGE(self, type_): + def visit_IMAGE(self, type_, 
**kw): return "IMAGE" - def visit_VARBINARY(self, type_): + def visit_VARBINARY(self, type_, **kw): return self._extend( - "VARBINARY", - type_, - length=type_.length or 'max') + "VARBINARY", + type_, + length=type_.length or 'max') - def visit_boolean(self, type_): + def visit_boolean(self, type_, **kw): return self.visit_BIT(type_) - def visit_BIT(self, type_): + def visit_BIT(self, type_, **kw): return "BIT" - def visit_MONEY(self, type_): + def visit_MONEY(self, type_, **kw): return "MONEY" - def visit_SMALLMONEY(self, type_): + def visit_SMALLMONEY(self, type_, **kw): return 'SMALLMONEY' - def visit_UNIQUEIDENTIFIER(self, type_): + def visit_UNIQUEIDENTIFIER(self, type_, **kw): return "UNIQUEIDENTIFIER" - def visit_SQL_VARIANT(self, type_): + def visit_SQL_VARIANT(self, type_, **kw): return 'SQL_VARIANT' @@ -623,6 +923,12 @@ class MSExecutionContext(default.DefaultExecutionContext): _result_proxy = None _lastrowid = None + def _opt_encode(self, statement): + if not self.dialect.supports_unicode_statements: + return self.dialect._encoder(statement)[0] + else: + return statement + def pre_exec(self): """Activate IDENTITY_INSERT if needed.""" @@ -633,20 +939,39 @@ def pre_exec(self): if insert_has_sequence: self._enable_identity_insert = \ - seq_column.key in self.compiled_parameters[0] + seq_column.key in self.compiled_parameters[0] or \ + ( + self.compiled.statement.parameters and ( + ( + self.compiled.statement._has_multi_parameters + and + seq_column.key in + self.compiled.statement.parameters[0] + ) or ( + not + self.compiled.statement._has_multi_parameters + and + seq_column.key in + self.compiled.statement.parameters + ) + ) + ) else: self._enable_identity_insert = False self._select_lastrowid = insert_has_sequence and \ - not self.compiled.returning and \ - not self._enable_identity_insert and \ - not self.executemany + not self.compiled.returning and \ + not self._enable_identity_insert and \ + not self.executemany if self._enable_identity_insert: - self.root_connection._cursor_execute(self.cursor, - "SET IDENTITY_INSERT %s ON" % - self.dialect.identifier_preparer.format_table(tbl), - (), self) + self.root_connection._cursor_execute( + self.cursor, + self._opt_encode( + "SET IDENTITY_INSERT %s ON" % + self.dialect.identifier_preparer.format_table(tbl)), + (), + self) def post_exec(self): """Disable IDENTITY_INSERT if enabled.""" @@ -654,11 +979,14 @@ def post_exec(self): conn = self.root_connection if self._select_lastrowid: if self.dialect.use_scope_identity: - conn._cursor_execute(self.cursor, + conn._cursor_execute( + self.cursor, "SELECT scope_identity() AS lastrowid", (), self) else: conn._cursor_execute(self.cursor, - "SELECT @@identity AS lastrowid", (), self) + "SELECT @@identity AS lastrowid", + (), + self) # fetchall() ensures the cursor is consumed without closing it row = self.cursor.fetchall()[0] self._lastrowid = int(row[0]) @@ -668,11 +996,14 @@ def post_exec(self): self._result_proxy = engine.FullyBufferedResultProxy(self) if self._enable_identity_insert: - conn._cursor_execute(self.cursor, - "SET IDENTITY_INSERT %s OFF" % - self.dialect.identifier_preparer. - format_table(self.compiled.statement.table), - (), self) + conn._cursor_execute( + self.cursor, + self._opt_encode( + "SET IDENTITY_INSERT %s OFF" % + self.dialect.identifier_preparer. 
format_table( + self.compiled.statement.table)), + (), + self) def get_lastrowid(self): return self._lastrowid @@ -681,11 +1012,11 @@ def handle_dbapi_exception(self, e): if self._enable_identity_insert: try: self.cursor.execute( + self._opt_encode( "SET IDENTITY_INSERT %s OFF" % - self.dialect.identifier_preparer.\ - format_table(self.compiled.statement.table) - ) - except: + self.dialect.identifier_preparer. format_table( + self.compiled.statement.table))) + except Exception: pass def get_result_proxy(self): @@ -701,16 +1032,25 @@ class MSSQLCompiler(compiler.SQLCompiler): extract_map = util.update_copy( compiler.SQLCompiler.extract_map, { - 'doy': 'dayofyear', - 'dow': 'weekday', - 'milliseconds': 'millisecond', - 'microseconds': 'microsecond' - }) + 'doy': 'dayofyear', + 'dow': 'weekday', + 'milliseconds': 'millisecond', + 'microseconds': 'microsecond' + }) def __init__(self, *args, **kwargs): self.tablealiases = {} super(MSSQLCompiler, self).__init__(*args, **kwargs) + def _with_legacy_schema_aliasing(fn): + def decorate(self, *arg, **kw): + if self.dialect.legacy_schema_aliasing: + return fn(self, *arg, **kw) + else: + super_ = getattr(super(MSSQLCompiler, self), fn.__name__) + return super_(*arg, **kw) + return decorate + def visit_now_func(self, fn, **kw): return "CURRENT_TIMESTAMP" @@ -725,8 +1065,8 @@ def visit_char_length_func(self, fn, **kw): def visit_concat_op_binary(self, binary, operator, **kw): return "%s + %s" % \ - (self.process(binary.left, **kw), - self.process(binary.right, **kw)) + (self.process(binary.left, **kw), + self.process(binary.right, **kw)) def visit_true(self, expr, **kw): return '1' @@ -736,22 +1076,27 @@ def visit_false(self, expr, **kw): def visit_match_op_binary(self, binary, operator, **kw): return "CONTAINS (%s, %s)" % ( - self.process(binary.left, **kw), - self.process(binary.right, **kw)) + self.process(binary.left, **kw), + self.process(binary.right, **kw)) - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): """ MS-SQL puts TOP, it's version of LIMIT here """ - if select._distinct or select._limit is not None: - s = select._distinct and "DISTINCT " or "" + s = "" + if select._distinct: + s += "DISTINCT " + + if select._simple_int_limit and not select._offset: # ODBC drivers and possibly others # don't support bind params in the SELECT clause on SQL Server. # so have to use literal here. - if select._limit is not None: - if not select._offset: - s += "TOP %d " % select._limit + s += "TOP %d " % select._limit + + if s: return s - return compiler.SQLCompiler.get_select_precolumns(self, select) + else: + return compiler.SQLCompiler.get_select_precolumns( + self, select, **kw) def get_from_hint_text(self, table, text): return text @@ -759,7 +1104,7 @@ def get_from_hint_text(self, table, text): def get_crud_hint_text(self, table, text): return text - def limit_clause(self, select): + def limit_clause(self, select, **kw): # Limit in mssql is after the select keyword return "" @@ -768,40 +1113,52 @@ def visit_select(self, select, **kwargs): so tries to wrap it in a subquery with ``row_number()`` criterion. """ - if select._offset and not getattr(select, '_mssql_visit', None): + if ( + ( + not select._simple_int_limit and + select._limit_clause is not None + ) or ( + select._offset_clause is not None and + not select._simple_int_offset or select._offset + ) + ) and not getattr(select, '_mssql_visit', None): + # to use ROW_NUMBER(), an ORDER BY is required. 
if not select._order_by_clause.clauses: raise exc.CompileError('MSSQL requires an order_by when ' - 'using an offset.') + 'using an OFFSET or a non-simple ' + 'LIMIT clause') + + _order_by_clauses = [ + sql_util.unwrap_label_reference(elem) + for elem in select._order_by_clause.clauses + ] - _offset = select._offset - _limit = select._limit - _order_by_clauses = select._order_by_clause.clauses + limit_clause = select._limit_clause + offset_clause = select._offset_clause + kwargs['select_wraps_for'] = select select = select._generate() select._mssql_visit = True select = select.column( - sql.func.ROW_NUMBER().over(order_by=_order_by_clauses) - .label("mssql_rn") - ).order_by(None).alias() + sql.func.ROW_NUMBER().over(order_by=_order_by_clauses) + .label("mssql_rn")).order_by(None).alias() mssql_rn = sql.column('mssql_rn') limitselect = sql.select([c for c in select.c if - c.key != 'mssql_rn']) - limitselect.append_whereclause(mssql_rn > _offset) - if _limit is not None: - limitselect.append_whereclause(mssql_rn <= (_limit + _offset)) - return self.process(limitselect, iswrapper=True, **kwargs) + c.key != 'mssql_rn']) + if offset_clause is not None: + limitselect.append_whereclause(mssql_rn > offset_clause) + if limit_clause is not None: + limitselect.append_whereclause( + mssql_rn <= (limit_clause + offset_clause)) + else: + limitselect.append_whereclause( + mssql_rn <= (limit_clause)) + return self.process(limitselect, **kwargs) else: return compiler.SQLCompiler.visit_select(self, select, **kwargs) - def _schema_aliased_table(self, table): - if getattr(table, 'schema', None) is not None: - if table not in self.tablealiases: - self.tablealiases[table] = table.alias() - return self.tablealiases[table] - else: - return None - + @_with_legacy_schema_aliasing def visit_table(self, table, mssql_aliased=False, iscrud=False, **kwargs): if mssql_aliased is table or iscrud: return super(MSSQLCompiler, self).visit_table(table, **kwargs) @@ -813,44 +1170,65 @@ def visit_table(self, table, mssql_aliased=False, iscrud=False, **kwargs): else: return super(MSSQLCompiler, self).visit_table(table, **kwargs) - def visit_alias(self, alias, **kwargs): + @_with_legacy_schema_aliasing + def visit_alias(self, alias, **kw): # translate for schema-qualified table aliases - kwargs['mssql_aliased'] = alias.original - return super(MSSQLCompiler, self).visit_alias(alias, **kwargs) - - def visit_extract(self, extract, **kw): - field = self.extract_map.get(extract.field, extract.field) - return 'DATEPART("%s", %s)' % \ - (field, self.process(extract.expr, **kw)) - - def visit_savepoint(self, savepoint_stmt): - return "SAVE TRANSACTION %s" % self.preparer.format_savepoint(savepoint_stmt) - - def visit_rollback_to_savepoint(self, savepoint_stmt): - return ("ROLLBACK TRANSACTION %s" - % self.preparer.format_savepoint(savepoint_stmt)) + kw['mssql_aliased'] = alias.original + return super(MSSQLCompiler, self).visit_alias(alias, **kw) - def visit_column(self, column, add_to_result_map=None, **kwargs): + @_with_legacy_schema_aliasing + def visit_column(self, column, add_to_result_map=None, **kw): if column.table is not None and \ - (not self.isupdate and not self.isdelete) or self.is_subquery(): + (not self.isupdate and not self.isdelete) or \ + self.is_subquery(): # translate for schema-qualified table aliases t = self._schema_aliased_table(column.table) if t is not None: converted = expression._corresponding_column_or_error( - t, column) + t, column) if add_to_result_map is not None: add_to_result_map( - column.name, - 
column.name, - (column, column.name, column.key), - column.type + column.name, + column.name, + (column, column.name, column.key), + column.type ) return super(MSSQLCompiler, self).\ - visit_column(converted, **kwargs) + visit_column(converted, **kw) return super(MSSQLCompiler, self).visit_column( - column, add_to_result_map=add_to_result_map, **kwargs) + column, add_to_result_map=add_to_result_map, **kw) + + def _schema_aliased_table(self, table): + if getattr(table, 'schema', None) is not None: + if self.dialect._warn_schema_aliasing and \ + table.schema.lower() != 'information_schema': + util.warn( + "legacy_schema_aliasing flag is defaulted to True; " + "some schema-qualified queries may not function " + "correctly. Consider setting this flag to False for " + "modern SQL Server versions; this flag will default to " + "False in version 1.1") + + if table not in self.tablealiases: + self.tablealiases[table] = table.alias() + return self.tablealiases[table] + else: + return None + + def visit_extract(self, extract, **kw): + field = self.extract_map.get(extract.field, extract.field) + return 'DATEPART(%s, %s)' % \ + (field, self.process(extract.expr, **kw)) + + def visit_savepoint(self, savepoint_stmt): + return "SAVE TRANSACTION %s" % \ + self.preparer.format_savepoint(savepoint_stmt) + + def visit_rollback_to_savepoint(self, savepoint_stmt): + return ("ROLLBACK TRANSACTION %s" + % self.preparer.format_savepoint(savepoint_stmt)) def visit_binary(self, binary, **kwargs): """Move bind parameters to the right-hand side of an operator, where @@ -861,12 +1239,12 @@ def visit_binary(self, binary, **kwargs): isinstance(binary.left, expression.BindParameter) and binary.operator == operator.eq and not isinstance(binary.right, expression.BindParameter) - ): + ): return self.process( - expression.BinaryExpression(binary.right, - binary.left, - binary.operator), - **kwargs) + expression.BinaryExpression(binary.right, + binary.left, + binary.operator), + **kwargs) return super(MSSQLCompiler, self).visit_binary(binary, **kwargs) def returning_clause(self, stmt, returning_cols): @@ -879,10 +1257,10 @@ def returning_clause(self, stmt, returning_cols): adapter = sql_util.ClauseAdapter(target) columns = [ - self._label_select_column(None, adapter.traverse(c), - True, False, {}) - for c in expression._select_iterables(returning_cols) - ] + self._label_select_column(None, adapter.traverse(c), + True, False, {}) + for c in expression._select_iterables(returning_cols) + ] return 'OUTPUT ' + ', '.join(columns) @@ -898,7 +1276,7 @@ def label_select_column(self, select, column, asfrom): return column.label(None) else: return super(MSSQLCompiler, self).\ - label_select_column(select, column, asfrom) + label_select_column(select, column, asfrom) def for_update_clause(self, select): # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which @@ -915,9 +1293,9 @@ def order_by_clause(self, select, **kw): return "" def update_from_clause(self, update_stmt, - from_table, extra_froms, - from_hints, - **kw): + from_table, extra_froms, + from_hints, + **kw): """Render the UPDATE..FROM clause specific to MSSQL. 
In MSSQL, if the UPDATE statement involves an alias of the table to @@ -926,12 +1304,13 @@ def update_from_clause(self, update_stmt, """ return "FROM " + ', '.join( - t._compiler_dispatch(self, asfrom=True, - fromhints=from_hints, **kw) - for t in [from_table] + extra_froms) + t._compiler_dispatch(self, asfrom=True, + fromhints=from_hints, **kw) + for t in [from_table] + extra_froms) class MSSQLStrictCompiler(MSSQLCompiler): + """A subclass of MSSQLCompiler which disables the usage of bind parameters where not allowed natively by MS-SQL. @@ -944,16 +1323,16 @@ class MSSQLStrictCompiler(MSSQLCompiler): def visit_in_op_binary(self, binary, operator, **kw): kw['literal_binds'] = True return "%s IN %s" % ( - self.process(binary.left, **kw), - self.process(binary.right, **kw) - ) + self.process(binary.left, **kw), + self.process(binary.right, **kw) + ) def visit_notin_op_binary(self, binary, operator, **kw): kw['literal_binds'] = True return "%s NOT IN %s" % ( - self.process(binary.left, **kw), - self.process(binary.right, **kw) - ) + self.process(binary.left, **kw), + self.process(binary.right, **kw) + ) def render_literal_value(self, value, type_): """ @@ -972,13 +1351,17 @@ def render_literal_value(self, value, type_): return "'" + str(value) + "'" else: return super(MSSQLStrictCompiler, self).\ - render_literal_value(value, type_) + render_literal_value(value, type_) class MSDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): - colspec = (self.preparer.format_column(column) + " " - + self.dialect.type_compiler.process(column.type)) + colspec = ( + self.preparer.format_column(column) + " " + + self.dialect.type_compiler.process( + column.type, type_expression=column) + ) if column.nullable is not None: if not column.nullable or column.primary_key or \ @@ -989,17 +1372,19 @@ def get_column_specification(self, column, **kwargs): if column.table is None: raise exc.CompileError( - "mssql requires Table-bound columns " - "in order to generate DDL") + "mssql requires Table-bound columns " + "in order to generate DDL") - # install an IDENTITY Sequence if we either a sequence or an implicit IDENTITY column + # install an IDENTITY Sequence if we either a sequence or an implicit + # IDENTITY column if isinstance(column.default, sa_schema.Sequence): if column.default.start == 0: start = 0 else: start = column.default.start or 1 - colspec += " IDENTITY(%s,%s)" % (start, column.default.increment or 1) + colspec += " IDENTITY(%s,%s)" % (start, + column.default.increment or 1) elif column is column.table._autoincrement_column: colspec += " IDENTITY(1,1)" else: @@ -1022,21 +1407,24 @@ def visit_create_index(self, create, include_schema=False): text += "CLUSTERED " text += "INDEX %s ON %s (%s)" \ - % ( - self._prepared_index_name(index, - include_schema=include_schema), - preparer.format_table(index.table), - ', '.join( - self.sql_compiler.process(expr, - include_table=False, literal_binds=True) for - expr in index.expressions) - ) + % ( + self._prepared_index_name(index, + include_schema=include_schema), + preparer.format_table(index.table), + ', '.join( + self.sql_compiler.process(expr, + include_table=False, + literal_binds=True) for + expr in index.expressions) + ) # handle other included columns if index.dialect_options['mssql']['include']: inclusions = [index.table.c[col] - if isinstance(col, util.string_types) else col - for col in index.dialect_options['mssql']['include']] + if isinstance(col, util.string_types) else col + for col in + 
index.dialect_options['mssql']['include'] + ] text += " INCLUDE (%s)" \ % ', '.join([preparer.quote(c.name) @@ -1048,7 +1436,7 @@ def visit_drop_index(self, drop): return "\nDROP INDEX %s ON %s" % ( self._prepared_index_name(drop.element, include_schema=False), self.preparer.format_table(drop.element.table) - ) + ) def visit_primary_key_constraint(self, constraint): if len(constraint) == 0: @@ -1084,6 +1472,7 @@ def visit_unique_constraint(self, constraint): text += self.define_constraint_deferrability(constraint) return text + class MSIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = RESERVED_WORDS @@ -1104,7 +1493,7 @@ def _db_plus_owner_listing(fn): def wrap(dialect, connection, schema=None, **kw): dbname, owner = _owner_plus_db(dialect, schema) return _switch_db(dbname, connection, fn, dialect, connection, - dbname, owner, schema, **kw) + dbname, owner, schema, **kw) return update_wrapper(wrap, fn) @@ -1112,7 +1501,7 @@ def _db_plus_owner(fn): def wrap(dialect, connection, tablename, schema=None, **kw): dbname, owner = _owner_plus_db(dialect, schema) return _switch_db(dbname, connection, fn, dialect, connection, - tablename, dbname, owner, schema, **kw) + tablename, dbname, owner, schema, **kw) return update_wrapper(wrap, fn) @@ -1151,6 +1540,10 @@ class MSDialect(default.DefaultDialect): sqltypes.Time: TIME, } + engine_config_types = default.DefaultDialect.engine_config_types.union([ + ('legacy_schema_aliasing', util.asbool), + ]) + ischema_names = ischema_names supports_native_boolean = False @@ -1181,13 +1574,24 @@ def __init__(self, query_timeout=None, use_scope_identity=True, max_identifier_length=None, - schema_name="dbo", **opts): + schema_name="dbo", + deprecate_large_types=None, + legacy_schema_aliasing=None, **opts): self.query_timeout = int(query_timeout or 0) self.schema_name = schema_name self.use_scope_identity = use_scope_identity self.max_identifier_length = int(max_identifier_length or 0) or \ - self.max_identifier_length + self.max_identifier_length + self.deprecate_large_types = deprecate_large_types + + if legacy_schema_aliasing is None: + self.legacy_schema_aliasing = True + self._warn_schema_aliasing = True + else: + self.legacy_schema_aliasing = legacy_schema_aliasing + self._warn_schema_aliasing = False + super(MSDialect, self).__init__(**opts) def do_savepoint(self, connection, name): @@ -1201,37 +1605,46 @@ def do_release_savepoint(self, connection, name): def initialize(self, connection): super(MSDialect, self).initialize(connection) + self._setup_version_attributes() + + def _setup_version_attributes(self): if self.server_version_info[0] not in list(range(8, 17)): # FreeTDS with version 4.2 seems to report here # a number like "95.10.255". Don't know what # that is. So emit warning. + # Use TDS Version 7.0 through 7.3, per the MS information here: + # https://msdn.microsoft.com/en-us/library/dd339982.aspx + # and FreeTDS information here (7.3 highest supported version): + # http://www.freetds.org/userguide/choosingtdsprotocol.htm util.warn( "Unrecognized server version info '%s'. Version specific " "behaviors may not function properly. If using ODBC " - "with FreeTDS, ensure server version 7.0 or 8.0, not 4.2, " - "is configured in the FreeTDS configuration." % + "with FreeTDS, ensure TDS_VERSION 7.0 through 7.3, not " + "4.2, is configured in the FreeTDS configuration." 
% ".".join(str(x) for x in self.server_version_info)) if self.server_version_info >= MS_2005_VERSION and \ - 'implicit_returning' not in self.__dict__: + 'implicit_returning' not in self.__dict__: self.implicit_returning = True + if self.server_version_info >= MS_2008_VERSION: + self.supports_multivalues_insert = True + if self.deprecate_large_types is None: + self.deprecate_large_types = \ + self.server_version_info >= MS_2012_VERSION def _get_default_schema_name(self, connection): - user_name = connection.scalar("SELECT user_name()") - if user_name is not None: - # now, get the default schema - query = sql.text(""" + if self.server_version_info < MS_2005_VERSION: + return self.schema_name + + query = sql.text(""" SELECT default_schema_name FROM sys.database_principals - WHERE name = :name - AND type = 'S' - """) - try: - default_schema_name = connection.scalar(query, name=user_name) - if default_schema_name is not None: - return util.text_type(default_schema_name) - except: - pass - return self.schema_name + WHERE principal_id=database_principal_id() + """) + default_schema_name = connection.scalar(query) + if default_schema_name is not None: + return util.text_type(default_schema_name) + else: + return self.schema_name @_db_plus_owner def has_table(self, connection, tablename, dbname, owner, schema): @@ -1249,8 +1662,8 @@ def has_table(self, connection, tablename, dbname, owner, schema): @reflection.cache def get_schema_names(self, connection, **kw): s = sql.select([ischema.schemata.c.schema_name], - order_by=[ischema.schemata.c.schema_name] - ) + order_by=[ischema.schemata.c.schema_name] + ) schema_names = [r[0] for r in connection.execute(s)] return schema_names @@ -1259,10 +1672,10 @@ def get_schema_names(self, connection, **kw): def get_table_names(self, connection, dbname, owner, schema, **kw): tables = ischema.tables s = sql.select([tables.c.table_name], - sql.and_( - tables.c.table_schema == owner, - tables.c.table_type == 'BASE TABLE' - ), + sql.and_( + tables.c.table_schema == owner, + tables.c.table_type == 'BASE TABLE' + ), order_by=[tables.c.table_name] ) table_names = [r[0] for r in connection.execute(s)] @@ -1273,10 +1686,10 @@ def get_table_names(self, connection, dbname, owner, schema, **kw): def get_view_names(self, connection, dbname, owner, schema, **kw): tables = ischema.tables s = sql.select([tables.c.table_name], - sql.and_( - tables.c.table_schema == owner, - tables.c.table_type == 'VIEW' - ), + sql.and_( + tables.c.table_schema == owner, + tables.c.table_type == 'VIEW' + ), order_by=[tables.c.table_name] ) view_names = [r[0] for r in connection.execute(s)] @@ -1292,22 +1705,22 @@ def get_indexes(self, connection, tablename, dbname, owner, schema, **kw): rp = connection.execute( sql.text("select ind.index_id, ind.is_unique, ind.name " - "from sys.indexes as ind join sys.tables as tab on " - "ind.object_id=tab.object_id " - "join sys.schemas as sch on sch.schema_id=tab.schema_id " - "where tab.name = :tabname " - "and sch.name=:schname " - "and ind.is_primary_key=0", - bindparams=[ - sql.bindparam('tabname', tablename, - sqltypes.String(convert_unicode=True)), - sql.bindparam('schname', owner, - sqltypes.String(convert_unicode=True)) - ], - typemap={ - 'name': sqltypes.Unicode() - } - ) + "from sys.indexes as ind join sys.tables as tab on " + "ind.object_id=tab.object_id " + "join sys.schemas as sch on sch.schema_id=tab.schema_id " + "where tab.name = :tabname " + "and sch.name=:schname " + "and ind.is_primary_key=0", + bindparams=[ + sql.bindparam('tabname', 
tablename, + sqltypes.String(convert_unicode=True)), + sql.bindparam('schname', owner, + sqltypes.String(convert_unicode=True)) + ], + typemap={ + 'name': sqltypes.Unicode() + } + ) ) indexes = {} for row in rp: @@ -1327,15 +1740,15 @@ def get_indexes(self, connection, tablename, dbname, owner, schema, **kw): "join sys.schemas as sch on sch.schema_id=tab.schema_id " "where tab.name=:tabname " "and sch.name=:schname", - bindparams=[ - sql.bindparam('tabname', tablename, - sqltypes.String(convert_unicode=True)), - sql.bindparam('schname', owner, - sqltypes.String(convert_unicode=True)) - ], - typemap={'name': sqltypes.Unicode()} - ), - ) + bindparams=[ + sql.bindparam('tabname', tablename, + sqltypes.String(convert_unicode=True)), + sql.bindparam('schname', owner, + sqltypes.String(convert_unicode=True)) + ], + typemap={'name': sqltypes.Unicode()} + ), + ) for row in rp: if row['index_id'] in indexes: indexes[row['index_id']]['column_names'].append(row['name']) @@ -1344,7 +1757,8 @@ def get_indexes(self, connection, tablename, dbname, owner, schema, **kw): @reflection.cache @_db_plus_owner - def get_view_definition(self, connection, viewname, dbname, owner, schema, **kw): + def get_view_definition(self, connection, viewname, + dbname, owner, schema, **kw): rp = connection.execute( sql.text( "select definition from sys.sql_modules as mod, " @@ -1356,9 +1770,9 @@ def get_view_definition(self, connection, viewname, dbname, owner, schema, **kw) "views.name=:viewname and sch.name=:schname", bindparams=[ sql.bindparam('viewname', viewname, - sqltypes.String(convert_unicode=True)), + sqltypes.String(convert_unicode=True)), sql.bindparam('schname', owner, - sqltypes.String(convert_unicode=True)) + sqltypes.String(convert_unicode=True)) ] ) ) @@ -1378,7 +1792,7 @@ def get_columns(self, connection, tablename, dbname, owner, schema, **kw): else: whereclause = columns.c.table_name == tablename s = sql.select([columns], whereclause, - order_by=[columns.c.ordinal_position]) + order_by=[columns.c.ordinal_position]) c = connection.execute(s) cols = [] @@ -1403,12 +1817,11 @@ def get_columns(self, connection, tablename, dbname, owner, schema, **kw): if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText, MSNText, MSBinary, MSVarBinary, sqltypes.LargeBinary): + if charlen == -1: + charlen = 'max' kwargs['length'] = charlen if collation: kwargs['collation'] = collation - if coltype == MSText or \ - (coltype in (MSString, MSNVarchar) and charlen == -1): - kwargs.pop('length') if coltype is None: util.warn( @@ -1448,7 +1861,7 @@ def get_columns(self, connection, tablename, dbname, owner, schema, **kw): ic = col_name colmap[col_name]['autoincrement'] = True colmap[col_name]['sequence'] = dict( - name='%s_identity' % col_name) + name='%s_identity' % col_name) break cursor.close() @@ -1457,7 +1870,7 @@ def get_columns(self, connection, tablename, dbname, owner, schema, **kw): cursor = connection.execute( "select ident_seed('%s'), ident_incr('%s')" % (table_fullname, table_fullname) - ) + ) row = cursor.first() if row is not None and row[0] is not None: @@ -1469,18 +1882,21 @@ def get_columns(self, connection, tablename, dbname, owner, schema, **kw): @reflection.cache @_db_plus_owner - def get_pk_constraint(self, connection, tablename, dbname, owner, schema, **kw): + def get_pk_constraint(self, connection, tablename, + dbname, owner, schema, **kw): pkeys = [] TC = ischema.constraints C = ischema.key_constraints.alias('C') # Primary key constraints - s = sql.select([C.c.column_name, TC.c.constraint_type, 
C.c.constraint_name], - sql.and_(TC.c.constraint_name == C.c.constraint_name, - TC.c.table_schema == C.c.table_schema, - C.c.table_name == tablename, - C.c.table_schema == owner) - ) + s = sql.select([C.c.column_name, + TC.c.constraint_type, + C.c.constraint_name], + sql.and_(TC.c.constraint_name == C.c.constraint_name, + TC.c.table_schema == C.c.table_schema, + C.c.table_name == tablename, + C.c.table_schema == owner) + ) c = connection.execute(s) constraint_name = None for row in c: @@ -1492,7 +1908,8 @@ def get_pk_constraint(self, connection, tablename, dbname, owner, schema, **kw): @reflection.cache @_db_plus_owner - def get_foreign_keys(self, connection, tablename, dbname, owner, schema, **kw): + def get_foreign_keys(self, connection, tablename, + dbname, owner, schema, **kw): RR = ischema.ref_constraints C = ischema.key_constraints.alias('C') R = ischema.key_constraints.alias('R') @@ -1507,11 +1924,11 @@ def get_foreign_keys(self, connection, tablename, dbname, owner, schema, **kw): C.c.table_schema == owner, C.c.constraint_name == RR.c.constraint_name, R.c.constraint_name == - RR.c.unique_constraint_name, + RR.c.unique_constraint_name, C.c.ordinal_position == R.c.ordinal_position ), order_by=[RR.c.constraint_name, R.c.ordinal_position] - ) + ) # group rows by constraint ID, to handle multi-column FKs fkeys = [] @@ -1541,8 +1958,8 @@ def fkey_rec(): rec['referred_schema'] = rschema local_cols, remote_cols = \ - rec['constrained_columns'],\ - rec['referred_columns'] + rec['constrained_columns'],\ + rec['referred_columns'] local_cols.append(scol) remote_cols.append(rcol) diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py index 26e70f7f09..e2c0a466cd 100644 --- a/lib/sqlalchemy/dialects/mssql/information_schema.py +++ b/lib/sqlalchemy/dialects/mssql/information_schema.py @@ -1,10 +1,12 @@ # mssql/information_schema.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -# TODO: should be using the sys. catalog with SQL Server, not information schema +# TODO: should be using the sys. catalog with SQL Server, not information +# schema from ... import Table, MetaData, Column from ...types import String, Unicode, UnicodeText, Integer, TypeDecorator @@ -15,6 +17,7 @@ ischema = MetaData() + class CoerceUnicode(TypeDecorator): impl = Unicode @@ -26,10 +29,12 @@ def process_bind_param(self, value, dialect): def bind_expression(self, bindvalue): return _cast_on_2005(bindvalue) + class _cast_on_2005(expression.ColumnElement): def __init__(self, bindvalue): self.bindvalue = bindvalue + @compiles(_cast_on_2005) def _compile(element, compiler, **kw): from . 
import base @@ -39,76 +44,93 @@ def _compile(element, compiler, **kw): return compiler.process(cast(element.bindvalue, Unicode), **kw) schemata = Table("SCHEMATA", ischema, - Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"), - Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"), - Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"), - schema="INFORMATION_SCHEMA") + Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"), + Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"), + Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"), + schema="INFORMATION_SCHEMA") tables = Table("TABLES", ischema, - Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"), - Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), - Column("TABLE_NAME", CoerceUnicode, key="table_name"), - Column("TABLE_TYPE", String(convert_unicode=True), key="table_type"), - schema="INFORMATION_SCHEMA") + Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"), + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column( + "TABLE_TYPE", String(convert_unicode=True), + key="table_type"), + schema="INFORMATION_SCHEMA") columns = Table("COLUMNS", ischema, - Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), - Column("TABLE_NAME", CoerceUnicode, key="table_name"), - Column("COLUMN_NAME", CoerceUnicode, key="column_name"), - Column("IS_NULLABLE", Integer, key="is_nullable"), - Column("DATA_TYPE", String, key="data_type"), - Column("ORDINAL_POSITION", Integer, key="ordinal_position"), - Column("CHARACTER_MAXIMUM_LENGTH", Integer, key="character_maximum_length"), - Column("NUMERIC_PRECISION", Integer, key="numeric_precision"), - Column("NUMERIC_SCALE", Integer, key="numeric_scale"), - Column("COLUMN_DEFAULT", Integer, key="column_default"), - Column("COLLATION_NAME", String, key="collation_name"), - schema="INFORMATION_SCHEMA") + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("COLUMN_NAME", CoerceUnicode, key="column_name"), + Column("IS_NULLABLE", Integer, key="is_nullable"), + Column("DATA_TYPE", String, key="data_type"), + Column("ORDINAL_POSITION", Integer, key="ordinal_position"), + Column("CHARACTER_MAXIMUM_LENGTH", Integer, + key="character_maximum_length"), + Column("NUMERIC_PRECISION", Integer, key="numeric_precision"), + Column("NUMERIC_SCALE", Integer, key="numeric_scale"), + Column("COLUMN_DEFAULT", Integer, key="column_default"), + Column("COLLATION_NAME", String, key="collation_name"), + schema="INFORMATION_SCHEMA") constraints = Table("TABLE_CONSTRAINTS", ischema, - Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), - Column("TABLE_NAME", CoerceUnicode, key="table_name"), - Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"), - Column("CONSTRAINT_TYPE", String(convert_unicode=True), key="constraint_type"), - schema="INFORMATION_SCHEMA") + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("CONSTRAINT_NAME", CoerceUnicode, + key="constraint_name"), + Column("CONSTRAINT_TYPE", String( + convert_unicode=True), key="constraint_type"), + schema="INFORMATION_SCHEMA") column_constraints = Table("CONSTRAINT_COLUMN_USAGE", ischema, - Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), - Column("TABLE_NAME", CoerceUnicode, key="table_name"), - Column("COLUMN_NAME", CoerceUnicode, key="column_name"), - Column("CONSTRAINT_NAME", 
CoerceUnicode, key="constraint_name"), - schema="INFORMATION_SCHEMA") + Column("TABLE_SCHEMA", CoerceUnicode, + key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, + key="table_name"), + Column("COLUMN_NAME", CoerceUnicode, + key="column_name"), + Column("CONSTRAINT_NAME", CoerceUnicode, + key="constraint_name"), + schema="INFORMATION_SCHEMA") key_constraints = Table("KEY_COLUMN_USAGE", ischema, - Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), - Column("TABLE_NAME", CoerceUnicode, key="table_name"), - Column("COLUMN_NAME", CoerceUnicode, key="column_name"), - Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"), - Column("ORDINAL_POSITION", Integer, key="ordinal_position"), - schema="INFORMATION_SCHEMA") + Column("TABLE_SCHEMA", CoerceUnicode, + key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, + key="table_name"), + Column("COLUMN_NAME", CoerceUnicode, + key="column_name"), + Column("CONSTRAINT_NAME", CoerceUnicode, + key="constraint_name"), + Column("ORDINAL_POSITION", Integer, + key="ordinal_position"), + schema="INFORMATION_SCHEMA") ref_constraints = Table("REFERENTIAL_CONSTRAINTS", ischema, - Column("CONSTRAINT_CATALOG", CoerceUnicode, key="constraint_catalog"), - Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="constraint_schema"), - Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"), - # TODO: is CATLOG misspelled ? - Column("UNIQUE_CONSTRAINT_CATLOG", CoerceUnicode, - key="unique_constraint_catalog"), - - Column("UNIQUE_CONSTRAINT_SCHEMA", CoerceUnicode, - key="unique_constraint_schema"), - Column("UNIQUE_CONSTRAINT_NAME", CoerceUnicode, - key="unique_constraint_name"), - Column("MATCH_OPTION", String, key="match_option"), - Column("UPDATE_RULE", String, key="update_rule"), - Column("DELETE_RULE", String, key="delete_rule"), - schema="INFORMATION_SCHEMA") + Column("CONSTRAINT_CATALOG", CoerceUnicode, + key="constraint_catalog"), + Column("CONSTRAINT_SCHEMA", CoerceUnicode, + key="constraint_schema"), + Column("CONSTRAINT_NAME", CoerceUnicode, + key="constraint_name"), + # TODO: is CATLOG misspelled ? 
+ Column("UNIQUE_CONSTRAINT_CATLOG", CoerceUnicode, + key="unique_constraint_catalog"), + + Column("UNIQUE_CONSTRAINT_SCHEMA", CoerceUnicode, + key="unique_constraint_schema"), + Column("UNIQUE_CONSTRAINT_NAME", CoerceUnicode, + key="unique_constraint_name"), + Column("MATCH_OPTION", String, key="match_option"), + Column("UPDATE_RULE", String, key="update_rule"), + Column("DELETE_RULE", String, key="delete_rule"), + schema="INFORMATION_SCHEMA") views = Table("VIEWS", ischema, - Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"), - Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), - Column("TABLE_NAME", CoerceUnicode, key="table_name"), - Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"), - Column("CHECK_OPTION", String, key="check_option"), - Column("IS_UPDATABLE", String, key="is_updatable"), - schema="INFORMATION_SCHEMA") + Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"), + Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"), + Column("TABLE_NAME", CoerceUnicode, key="table_name"), + Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"), + Column("CHECK_OPTION", String, key="check_option"), + Column("IS_UPDATABLE", String, key="is_updatable"), + schema="INFORMATION_SCHEMA") diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py index 5b686c47a2..5e20ed11b1 100644 --- a/lib/sqlalchemy/dialects/mssql/mxodbc.py +++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py @@ -1,5 +1,6 @@ # mssql/mxodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -46,8 +47,8 @@ from ...connectors.mxodbc import MxODBCConnector from .pyodbc import MSExecutionContext_pyodbc, _MSNumeric_pyodbc from .base import (MSDialect, - MSSQLStrictCompiler, - _MSDateTime, _MSDate, _MSTime) + MSSQLStrictCompiler, + _MSDateTime, _MSDate, _MSTime) class _MSNumeric_mxodbc(_MSNumeric_pyodbc): @@ -81,7 +82,7 @@ class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc): SELECT SCOPE_IDENTITY in cases where OUTPUT clause does not work (tables with insert triggers). """ - #todo - investigate whether the pyodbc execution context + # todo - investigate whether the pyodbc execution context # is really only being used in cases where OUTPUT # won't work. @@ -90,7 +91,7 @@ class MSDialect_mxodbc(MxODBCConnector, MSDialect): # this is only needed if "native ODBC" mode is used, # which is now disabled by default. - #statement_compiler = MSSQLStrictCompiler + # statement_compiler = MSSQLStrictCompiler execution_ctx_cls = MSExecutionContext_mxodbc diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py index 0182fee1b5..32e3bd9226 100644 --- a/lib/sqlalchemy/dialects/mssql/pymssql.py +++ b/lib/sqlalchemy/dialects/mssql/pymssql.py @@ -1,5 +1,6 @@ # mssql/pymssql.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,7 +9,8 @@ .. 
dialect:: mssql+pymssql :name: pymssql :dbapi: pymssql - :connectstring: mssql+pymssql://:@?charset=utf8 + :connectstring: mssql+pymssql://:@/?\ +charset=utf8 :url: http://pymssql.org/ pymssql is a Python module that provides a Python DBAPI interface around @@ -44,14 +46,15 @@ class MSDialect_pymssql(MSDialect): @classmethod def dbapi(cls): module = __import__('pymssql') - # pymmsql doesn't have a Binary method. we use string - # TODO: monkeypatching here is less than ideal - module.Binary = lambda x: x if hasattr(x, 'decode') else str(x) - + # pymmsql < 2.1.1 doesn't have a Binary method. we use string client_ver = tuple(int(x) for x in module.__version__.split(".")) + if client_ver < (2, 1, 1): + # TODO: monkeypatching here is less than ideal + module.Binary = lambda x: x if hasattr(x, 'decode') else str(x) + if client_ver < (1, ): util.warn("The pymssql dialect expects at least " - "the 1.0 series of the pymssql DBAPI.") + "the 1.0 series of the pymssql DBAPI.") return module def __init__(self, **params): @@ -61,7 +64,7 @@ def __init__(self, **params): def _get_server_version_info(self, connection): vers = connection.scalar("select @@version") m = re.match( - r"Microsoft SQL Server.*? - (\d+).(\d+).(\d+).(\d+)", vers) + r"Microsoft .*? - (\d+).(\d+).(\d+).(\d+)", vers) if m: return tuple(int(x) for x in m.group(1, 2, 3, 4)) else: @@ -82,7 +85,8 @@ def is_disconnect(self, e, connection, cursor): "message 20003", # connection timeout "Error 10054", "Not connected to any MS SQL server", - "Connection is closed" + "Connection is closed", + "message 20006", # Write to the server failed ): if msg in str(e): return True diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py index 8c43eb8a1c..45c091cfb8 100644 --- a/lib/sqlalchemy/dialects/mssql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py @@ -1,5 +1,6 @@ # mssql/pyodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -11,87 +12,70 @@ :connectstring: mssql+pyodbc://:@ :url: http://pypi.python.org/pypi/pyodbc/ -Additional Connection Examples -------------------------------- +Connecting to PyODBC +-------------------- -Examples of pyodbc connection string URLs: +The URL here is to be translated to PyODBC connection strings, as +detailed in `ConnectionStrings `_. -* ``mssql+pyodbc://mydsn`` - connects using the specified DSN named ``mydsn``. - The connection string that is created will appear like:: +DSN Connections +^^^^^^^^^^^^^^^ - dsn=mydsn;Trusted_Connection=Yes +A DSN-based connection is **preferred** overall when using ODBC. A +basic DSN-based connection looks like:: -* ``mssql+pyodbc://user:pass@mydsn`` - connects using the DSN named - ``mydsn`` passing in the ``UID`` and ``PWD`` information. The - connection string that is created will appear like:: + engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn") - dsn=mydsn;UID=user;PWD=pass - -* ``mssql+pyodbc://user:pass@mydsn/?LANGUAGE=us_english`` - connects - using the DSN named ``mydsn`` passing in the ``UID`` and ``PWD`` - information, plus the additional connection configuration option - ``LANGUAGE``. 
The connection string that is created will appear - like:: - - dsn=mydsn;UID=user;PWD=pass;LANGUAGE=us_english - -* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection - that would appear like:: +Which above, will pass the following connection string to PyODBC:: - DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass - -* ``mssql+pyodbc://user:pass@host:123/db`` - connects using a connection - string which includes the port - information using the comma syntax. This will create the following - connection string:: - - DRIVER={SQL Server};Server=host,123;Database=db;UID=user;PWD=pass + dsn=mydsn;UID=user;PWD=pass -* ``mssql+pyodbc://user:pass@host/db?port=123`` - connects using a connection - string that includes the port - information as a separate ``port`` keyword. This will create the - following connection string:: +If the username and password are omitted, the DSN form will also add +the ``Trusted_Connection=yes`` directive to the ODBC string. - DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass;port=123 +Hostname Connections +^^^^^^^^^^^^^^^^^^^^ -* ``mssql+pyodbc://user:pass@host/db?driver=MyDriver`` - connects using a connection - string that includes a custom - ODBC driver name. This will create the following connection string:: +Hostname-based connections are **not preferred**, however are supported. +The ODBC driver name must be explicitly specified:: - DRIVER={MyDriver};Server=host;Database=db;UID=user;PWD=pass + engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=SQL+Server+Native+Client+10.0") -If you require a connection string that is outside the options -presented above, use the ``odbc_connect`` keyword to pass in a -urlencoded connection string. What gets passed in will be urldecoded -and passed directly. +.. versionchanged:: 1.0.0 Hostname-based PyODBC connections now require the + SQL Server driver name specified explicitly. SQLAlchemy cannot + choose an optimal default here as it varies based on platform + and installed drivers. -For example:: +Other keywords interpreted by the Pyodbc dialect to be passed to +``pyodbc.connect()`` in both the DSN and hostname cases include: +``odbc_autotranslate``, ``ansi``, ``unicode_results``, ``autocommit``. - mssql+pyodbc:///?odbc_connect=dsn%3Dmydsn%3BDatabase%3Ddb +Pass through exact Pyodbc string +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -would create the following connection string:: +A PyODBC connection string can also be sent exactly as specified in +`ConnectionStrings `_ +into the driver using the parameter ``odbc_connect``. The delimeters must be URL escaped, however, +as illustrated below using ``urllib.quote_plus``:: - dsn=mydsn;Database=db + import urllib + params = urllib.quote_plus("DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password") -Encoding your connection string can be easily accomplished through -the python shell. For example:: + engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params) - >>> import urllib - >>> urllib.quote_plus('dsn=mydsn;Database=db') - 'dsn%3Dmydsn%3BDatabase%3Ddb' Unicode Binds ------------- The current state of PyODBC on a unix backend with FreeTDS and/or -EasySoft is poor regarding unicode; different OS platforms and versions of UnixODBC -versus IODBC versus FreeTDS/EasySoft versus PyODBC itself dramatically -alter how strings are received. 
The PyODBC dialect attempts to use all the information -it knows to determine whether or not a Python unicode literal can be -passed directly to the PyODBC driver or not; while SQLAlchemy can encode -these to bytestrings first, some users have reported that PyODBC mis-handles -bytestrings for certain encodings and requires a Python unicode object, -while the author has observed widespread cases where a Python unicode +EasySoft is poor regarding unicode; different OS platforms and versions of +UnixODBC versus IODBC versus FreeTDS/EasySoft versus PyODBC itself +dramatically alter how strings are received. The PyODBC dialect attempts to +use all the information it knows to determine whether or not a Python unicode +literal can be passed directly to the PyODBC driver or not; while SQLAlchemy +can encode these to bytestrings first, some users have reported that PyODBC +mis-handles bytestrings for certain encodings and requires a Python unicode +object, while the author has observed widespread cases where a Python unicode is completely misinterpreted by PyODBC, particularly when dealing with the information schema tables used in table reflection, and the value must first be encoded to a bytestring. @@ -109,13 +93,21 @@ .. versionadded:: 0.7.7 ``supports_unicode_binds`` parameter to ``create_engine()``\ . +Rowcount Support +---------------- + +Pyodbc only has partial support for rowcount. See the notes at +:ref:`mssql_rowcount_versioning` for important notes when using ORM +versioning. + """ -from .base import MSExecutionContext, MSDialect +from .base import MSExecutionContext, MSDialect, VARBINARY from ...connectors.pyodbc import PyODBCConnector from ... import types as sqltypes, util import decimal + class _ms_numeric_pyodbc(object): """Turns Decimals with adjusted() < 0 or > 7 into strings. 
@@ -128,7 +120,7 @@ class _ms_numeric_pyodbc(object): def bind_processor(self, dialect): super_process = super(_ms_numeric_pyodbc, self).\ - bind_processor(dialect) + bind_processor(dialect) if not dialect._need_decimal_fix: return super_process @@ -154,38 +146,57 @@ def process(value): def _small_dec_to_string(self, value): return "%s0.%s%s" % ( - (value < 0 and '-' or ''), - '0' * (abs(value.adjusted()) - 1), - "".join([str(nint) for nint in value.as_tuple()[1]])) + (value < 0 and '-' or ''), + '0' * (abs(value.adjusted()) - 1), + "".join([str(nint) for nint in value.as_tuple()[1]])) def _large_dec_to_string(self, value): _int = value.as_tuple()[1] if 'E' in str(value): result = "%s%s%s" % ( - (value < 0 and '-' or ''), - "".join([str(s) for s in _int]), - "0" * (value.adjusted() - (len(_int) - 1))) + (value < 0 and '-' or ''), + "".join([str(s) for s in _int]), + "0" * (value.adjusted() - (len(_int) - 1))) else: if (len(_int) - 1) > value.adjusted(): result = "%s%s.%s" % ( - (value < 0 and '-' or ''), - "".join( - [str(s) for s in _int][0:value.adjusted() + 1]), - "".join( - [str(s) for s in _int][value.adjusted() + 1:])) + (value < 0 and '-' or ''), + "".join( + [str(s) for s in _int][0:value.adjusted() + 1]), + "".join( + [str(s) for s in _int][value.adjusted() + 1:])) else: result = "%s%s" % ( - (value < 0 and '-' or ''), - "".join( - [str(s) for s in _int][0:value.adjusted() + 1])) + (value < 0 and '-' or ''), + "".join( + [str(s) for s in _int][0:value.adjusted() + 1])) return result + class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric): pass + class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float): pass + +class _VARBINARY_pyodbc(VARBINARY): + def bind_processor(self, dialect): + if dialect.dbapi is None: + return None + + DBAPIBinary = dialect.dbapi.Binary + + def process(value): + if value is not None: + return DBAPIBinary(value) + else: + # pyodbc-specific + return dialect.dbapi.BinaryNull + return process + + class MSExecutionContext_pyodbc(MSExecutionContext): _embedded_scope_identity = False @@ -238,23 +249,24 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect): execution_ctx_cls = MSExecutionContext_pyodbc - pyodbc_driver_name = 'SQL Server' - colspecs = util.update_copy( MSDialect.colspecs, { sqltypes.Numeric: _MSNumeric_pyodbc, - sqltypes.Float: _MSFloat_pyodbc + sqltypes.Float: _MSFloat_pyodbc, + VARBINARY: _VARBINARY_pyodbc, + sqltypes.LargeBinary: _VARBINARY_pyodbc, } ) def __init__(self, description_encoding=None, **params): + if 'description_encoding' in params: + self.description_encoding = params.pop('description_encoding') super(MSDialect_pyodbc, self).__init__(**params) - self.description_encoding = description_encoding self.use_scope_identity = self.use_scope_identity and \ - self.dbapi and \ - hasattr(self.dbapi.Cursor, 'nextset') + self.dbapi and \ + hasattr(self.dbapi.Cursor, 'nextset') self._need_decimal_fix = self.dbapi and \ - self._dbapi_version() < (2, 1, 8) + self._dbapi_version() < (2, 1, 8) dialect = MSDialect_pyodbc diff --git a/lib/sqlalchemy/dialects/mssql/zxjdbc.py b/lib/sqlalchemy/dialects/mssql/zxjdbc.py index 706eef3a4f..0bf68c2a21 100644 --- a/lib/sqlalchemy/dialects/mssql/zxjdbc.py +++ b/lib/sqlalchemy/dialects/mssql/zxjdbc.py @@ -1,5 +1,6 @@ # mssql/zxjdbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ 
-8,9 +9,12 @@ .. dialect:: mssql+zxjdbc :name: zxJDBC for Jython :dbapi: zxjdbc - :connectstring: mssql+zxjdbc://user:pass@host:port/dbname[?key=value&key=value...] + :connectstring: mssql+zxjdbc://user:pass@host:port/dbname\ +[?key=value&key=value...] :driverurl: http://jtds.sourceforge.net/ + .. note:: Jython is not supported by current versions of SQLAlchemy. The + zxjdbc dialect should be considered as experimental. """ from ...connectors.zxJDBC import ZxJDBCConnector @@ -41,12 +45,12 @@ def post_exec(self): self._lastrowid = int(row[0]) if (self.isinsert or self.isupdate or self.isdelete) and \ - self.compiled.returning: + self.compiled.returning: self._result_proxy = engine.FullyBufferedResultProxy(self) if self._enable_identity_insert: table = self.dialect.identifier_preparer.format_table( - self.compiled.statement.table) + self.compiled.statement.table) self.cursor.execute("SET IDENTITY_INSERT %s OFF" % table) @@ -58,8 +62,8 @@ class MSDialect_zxjdbc(ZxJDBCConnector, MSDialect): def _get_server_version_info(self, connection): return tuple( - int(x) - for x in connection.connection.dbversion.split('.') - ) + int(x) + for x in connection.connection.dbversion.split('.') + ) dialect = MSDialect_zxjdbc diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py index 4eb8cc6d23..fabd932e7b 100644 --- a/lib/sqlalchemy/dialects/mysql/__init__.py +++ b/lib/sqlalchemy/dialects/mysql/__init__.py @@ -1,12 +1,13 @@ # mysql/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from . import base, mysqldb, oursql, \ - pyodbc, zxjdbc, mysqlconnector, pymysql,\ - gaerdbms, cymysql + pyodbc, zxjdbc, mysqlconnector, pymysql,\ + gaerdbms, cymysql # default dialect base.dialect = mysqldb.dialect @@ -21,8 +22,10 @@ VARBINARY, VARCHAR, YEAR, dialect __all__ = ( -'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'DOUBLE', -'ENUM', 'DECIMAL', 'FLOAT', 'INTEGER', 'INTEGER', 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT', -'MEDIUMTEXT', 'NCHAR', 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME', 'TIMESTAMP', -'TINYBLOB', 'TINYINT', 'TINYTEXT', 'VARBINARY', 'VARCHAR', 'YEAR', 'dialect' + 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', + 'DECIMAL', 'DOUBLE', 'ENUM', 'DECIMAL', 'FLOAT', 'INTEGER', 'INTEGER', + 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT', 'MEDIUMTEXT', 'NCHAR', + 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME', + 'TIMESTAMP', 'TINYBLOB', 'TINYINT', 'TINYTEXT', 'VARBINARY', 'VARCHAR', + 'YEAR', 'dialect' ) diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index ba6e7b6258..5763eda02a 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1,5 +1,6 @@ # mysql/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -25,22 +26,29 @@ Connection Timeouts ------------------- -MySQL features an automatic connection close behavior, for connections that have -been idle for eight hours or more. 
To circumvent having this issue, use the -``pool_recycle`` option which controls the maximum age of any connection:: +MySQL features an automatic connection close behavior, for connections that +have been idle for eight hours or more. To circumvent having this issue, use +the ``pool_recycle`` option which controls the maximum age of any connection:: engine = create_engine('mysql+mysqldb://...', pool_recycle=3600) +.. seealso:: + + :ref:`pool_setting_recycle` - full description of the pool recycle feature. + + .. _mysql_storage_engines: CREATE TABLE arguments including Storage Engines ------------------------------------------------ MySQL's CREATE TABLE syntax includes a wide array of special options, -including ``ENGINE``, ``CHARSET``, ``MAX_ROWS``, ``ROW_FORMAT``, ``INSERT_METHOD``, and many more. +including ``ENGINE``, ``CHARSET``, ``MAX_ROWS``, ``ROW_FORMAT``, +``INSERT_METHOD``, and many more. To accommodate the rendering of these arguments, specify the form ``mysql_argument_name="value"``. For example, to specify a table with -``ENGINE`` of ``InnoDB``, ``CHARSET`` of ``utf8``, and ``KEY_BLOCK_SIZE`` of ``1024``:: +``ENGINE`` of ``InnoDB``, ``CHARSET`` of ``utf8``, and ``KEY_BLOCK_SIZE`` +of ``1024``:: Table('mytable', metadata, Column('data', String(32)), @@ -49,26 +57,28 @@ mysql_key_block_size="1024" ) -The MySQL dialect will normally transfer any keyword specified as ``mysql_keyword_name`` -to be rendered as ``KEYWORD_NAME`` in the ``CREATE TABLE`` statement. A handful -of these names will render with a space instead of an underscore; to support this, -the MySQL dialect has awareness of these particular names, which include -``DATA DIRECTORY`` (e.g. ``mysql_data_directory``), ``CHARACTER SET`` (e.g. -``mysql_character_set``) and ``INDEX DIRECTORY`` (e.g. ``mysql_index_directory``). - -The most common argument is ``mysql_engine``, which refers to the storage engine -for the table. Historically, MySQL server installations would default +The MySQL dialect will normally transfer any keyword specified as +``mysql_keyword_name`` to be rendered as ``KEYWORD_NAME`` in the +``CREATE TABLE`` statement. A handful of these names will render with a space +instead of an underscore; to support this, the MySQL dialect has awareness of +these particular names, which include ``DATA DIRECTORY`` +(e.g. ``mysql_data_directory``), ``CHARACTER SET`` (e.g. +``mysql_character_set``) and ``INDEX DIRECTORY`` (e.g. +``mysql_index_directory``). + +The most common argument is ``mysql_engine``, which refers to the storage +engine for the table. Historically, MySQL server installations would default to ``MyISAM`` for this value, although newer versions may be defaulting to ``InnoDB``. The ``InnoDB`` engine is typically preferred for its support of transactions and foreign keys. A :class:`.Table` that is created in a MySQL database with a storage engine -of ``MyISAM`` will be essentially non-transactional, meaning any INSERT/UPDATE/DELETE -statement referring to this table will be invoked as autocommit. It also will have no -support for foreign key constraints; while the ``CREATE TABLE`` statement -accepts foreign key options, when using the ``MyISAM`` storage engine these -arguments are discarded. Reflecting such a table will also produce no -foreign key constraint information. +of ``MyISAM`` will be essentially non-transactional, meaning any +INSERT/UPDATE/DELETE statement referring to this table will be invoked as +autocommit. 
It also will have no support for foreign key constraints; while +the ``CREATE TABLE`` statement accepts foreign key options, when using the +``MyISAM`` storage engine these arguments are discarded. Reflecting such a +table will also produce no foreign key constraint information. For fully atomic transactions as well as support for foreign key constraints, all participating ``CREATE TABLE`` statements must specify a @@ -96,28 +106,47 @@ database itself, especially if database reflection features are to be used. +.. _mysql_isolation_level: + Transaction Isolation Level --------------------------- -:func:`.create_engine` accepts an ``isolation_level`` -parameter which results in the command ``SET SESSION -TRANSACTION ISOLATION LEVEL `` being invoked for -every new connection. Valid values for this parameter are -``READ COMMITTED``, ``READ UNCOMMITTED``, -``REPEATABLE READ``, and ``SERIALIZABLE``:: +All MySQL dialects support setting of transaction isolation level +both via a dialect-specific parameter :paramref:`.create_engine.isolation_level` +accepted by :func:`.create_engine`, +as well as the :paramref:`.Connection.execution_options.isolation_level` +argument as passed to :meth:`.Connection.execution_options`. +This feature works by issuing the command +``SET SESSION TRANSACTION ISOLATION LEVEL `` for +each new connection. + +To set isolation level using :func:`.create_engine`:: engine = create_engine( "mysql://scott:tiger@localhost/test", isolation_level="READ UNCOMMITTED" ) -.. versionadded:: 0.7.6 +To set using per-connection execution options:: + + connection = engine.connect() + connection = connection.execution_options( + isolation_level="READ COMMITTED" + ) + +Valid values for ``isolation_level`` include: + +* ``READ COMMITTED`` +* ``READ UNCOMMITTED`` +* ``REPEATABLE READ`` +* ``SERIALIZABLE`` AUTO_INCREMENT Behavior ----------------------- When creating tables, SQLAlchemy will automatically set ``AUTO_INCREMENT`` on -the first :class:`.Integer` primary key column which is not marked as a foreign key:: +the first :class:`.Integer` primary key column which is not marked as a +foreign key:: >>> t = Table('mytable', metadata, ... Column('mytable_id', Integer, primary_key=True) @@ -128,16 +157,100 @@ PRIMARY KEY (id) ) -You can disable this behavior by passing ``False`` to the :paramref:`~.Column.autoincrement` -argument of :class:`.Column`. This flag can also be used to enable -auto-increment on a secondary column in a multi-column key for some storage -engines:: +You can disable this behavior by passing ``False`` to the +:paramref:`~.Column.autoincrement` argument of :class:`.Column`. This flag +can also be used to enable auto-increment on a secondary column in a +multi-column key for some storage engines:: Table('mytable', metadata, Column('gid', Integer, primary_key=True, autoincrement=False), Column('id', Integer, primary_key=True) ) +.. _mysql_unicode: + +Unicode +------- + +Charset Selection +~~~~~~~~~~~~~~~~~ + +Most MySQL DBAPIs offer the option to set the client character set for +a connection. This is typically delivered using the ``charset`` parameter +in the URL, such as:: + + e = create_engine("mysql+pymysql://scott:tiger@localhost/\ +test?charset=utf8") + +This charset is the **client character set** for the connection. Some +MySQL DBAPIs will default this to a value such as ``latin1``, and some +will make use of the ``default-character-set`` setting in the ``my.cnf`` +file as well. Documentation for the DBAPI in use should be consulted +for specific behavior. 
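As a minimal sketch of an alternative delivery mechanism (assuming a DBAPI such as PyMySQL that accepts a ``charset`` keyword argument), the client character set can also be passed through ``connect_args`` rather than the URL query string::

    from sqlalchemy import create_engine

    # equivalent to appending ?charset=utf8 to the URL, for DBAPIs
    # that accept the keyword directly
    e = create_engine(
        "mysql+pymysql://scott:tiger@localhost/test",
        connect_args={"charset": "utf8"})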
+ +The encoding used for Unicode has traditionally been ``'utf8'``. However, +for MySQL versions 5.5.3 on forward, a new MySQL-specific encoding +``'utf8mb4'`` has been introduced. The rationale for this new encoding +is due to the fact that MySQL's utf-8 encoding only supports +codepoints up to three bytes instead of four. Therefore, +when communicating with a MySQL database +that includes codepoints more than three bytes in size, +this new charset is preferred, if supported by both the database as well +as the client DBAPI, as in:: + + e = create_engine("mysql+pymysql://scott:tiger@localhost/\ +test?charset=utf8mb4") + +At the moment, up-to-date versions of MySQLdb and PyMySQL support the +``utf8mb4`` charset. Other DBAPIs such as MySQL-Connector and OurSQL +may **not** support it as of yet. + +In order to use ``utf8mb4`` encoding, changes to +the MySQL schema and/or server configuration may be required. + +.. seealso:: + + `The utf8mb4 Character Set \ +`_ - \ +in the MySQL documentation + +Unicode Encoding / Decoding +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +All modern MySQL DBAPIs all offer the service of handling the encoding and +decoding of unicode data between the Python application space and the database. +As this was not always the case, SQLAlchemy also includes a comprehensive system +of performing the encode/decode task as well. As only one of these systems +should be in use at at time, SQLAlchemy has long included functionality +to automatically detect upon first connection whether or not the DBAPI is +automatically handling unicode. + +Whether or not the MySQL DBAPI will handle encoding can usually be configured +using a DBAPI flag ``use_unicode``, which is known to be supported at least +by MySQLdb, PyMySQL, and MySQL-Connector. Setting this value to ``0`` +in the "connect args" or query string will have the effect of disabling the +DBAPI's handling of unicode, such that it instead will return data of the +``str`` type or ``bytes`` type, with data in the configured charset:: + + # connect while disabling the DBAPI's unicode encoding/decoding + e = create_engine("mysql+mysqldb://scott:tiger@localhost/test?charset=utf8&use_unicode=0") + +Current recommendations for modern DBAPIs are as follows: + +* It is generally always safe to leave the ``use_unicode`` flag set at + its default; that is, don't use it at all. +* Under Python 3, the ``use_unicode=0`` flag should **never be used**. + SQLAlchemy under Python 3 generally assumes the DBAPI receives and returns + string values as Python 3 strings, which are inherently unicode objects. +* Under Python 2 with MySQLdb, the ``use_unicode=0`` flag will **offer + superior performance**, as MySQLdb's unicode converters under Python 2 only + have been observed to have unusually slow performance compared to SQLAlchemy's + fast C-based encoders/decoders. + +In short: don't specify ``use_unicode`` *at all*, with the possible +exception of ``use_unicode=0`` on MySQLdb with Python 2 **only** for a +potential performance gain. + Ansi Quoting Style ------------------ @@ -182,28 +295,26 @@ usual definition of "number of rows matched by an UPDATE or DELETE" statement. This is in contradiction to the default setting on most MySQL DBAPI drivers, which is "number of rows actually modified/deleted". 
For this reason, the -SQLAlchemy MySQL dialects always set the ``constants.CLIENT.FOUND_ROWS`` flag, -or whatever is equivalent for the DBAPI in use, on connect, unless the flag value -is overridden using DBAPI-specific options -(such as ``client_flag`` for the MySQL-Python driver, ``found_rows`` for the -OurSQL driver). +SQLAlchemy MySQL dialects always add the ``constants.CLIENT.FOUND_ROWS`` +flag, or whatever is equivalent for the target dialect, upon connection. +This setting is currently hardcoded. -See also: +.. seealso:: -:attr:`.ResultProxy.rowcount` + :attr:`.ResultProxy.rowcount` CAST Support ------------ -MySQL documents the CAST operator as available in version 4.0.2. When using the -SQLAlchemy :func:`.cast` function, SQLAlchemy -will not render the CAST token on MySQL before this version, based on server version -detection, instead rendering the internal expression directly. +MySQL documents the CAST operator as available in version 4.0.2. When using +the SQLAlchemy :func:`.cast` function, SQLAlchemy +will not render the CAST token on MySQL before this version, based on server +version detection, instead rendering the internal expression directly. -CAST may still not be desirable on an early MySQL version post-4.0.2, as it didn't -add all datatype support until 4.1.1. If your application falls into this -narrow area, the behavior of CAST can be controlled using the +CAST may still not be desirable on an early MySQL version post-4.0.2, as it +didn't add all datatype support until 4.1.1. If your application falls into +this narrow area, the behavior of CAST can be controlled using the :ref:`sqlalchemy.ext.compiler_toplevel` system, as per the recipe below:: from sqlalchemy.sql.expression import Cast @@ -240,7 +351,8 @@ def _check_mysql_version(element, compiler, **kw): Index('my_index', my_table.c.data, mysql_length=10) - Index('a_b_idx', my_table.c.a, my_table.c.b, mysql_length={'a': 4, 'b': 9}) + Index('a_b_idx', my_table.c.a, my_table.c.b, mysql_length={'a': 4, + 'b': 9}) Prefix lengths are given in characters for nonbinary string types and in bytes for binary string types. The value passed to the keyword argument *must* be @@ -288,10 +400,10 @@ def _check_mysql_version(element, compiler, **kw): MySQL does not support the foreign key arguments "DEFERRABLE", "INITIALLY", or "MATCH". Using the ``deferrable`` or ``initially`` keyword argument with -:class:`.ForeignKeyConstraint` or :class:`.ForeignKey` will have the effect of these keywords being -rendered in a DDL expression, which will then raise an error on MySQL. -In order to use these keywords on a foreign key while having them ignored -on a MySQL backend, use a custom compile rule:: +:class:`.ForeignKeyConstraint` or :class:`.ForeignKey` will have the effect of +these keywords being rendered in a DDL expression, which will then raise an +error on MySQL. In order to use these keywords on a foreign key while having +them ignored on a MySQL backend, use a custom compile rule:: from sqlalchemy.ext.compiler import compiles from sqlalchemy.schema import ForeignKeyConstraint @@ -302,19 +414,20 @@ def process(element, compiler, **kw): return compiler.visit_foreign_key_constraint(element, **kw) .. versionchanged:: 0.9.0 - the MySQL backend no longer silently ignores - the ``deferrable`` or ``initially`` keyword arguments of :class:`.ForeignKeyConstraint` - and :class:`.ForeignKey`. + the ``deferrable`` or ``initially`` keyword arguments of + :class:`.ForeignKeyConstraint` and :class:`.ForeignKey`. 
The "MATCH" keyword is in fact more insidious, and is explicitly disallowed -by SQLAlchemy in conjunction with the MySQL backend. This argument is silently -ignored by MySQL, but in addition has the effect of ON UPDATE and ON DELETE options -also being ignored by the backend. Therefore MATCH should never be used with the -MySQL backend; as is the case with DEFERRABLE and INITIALLY, custom compilation -rules can be used to correct a MySQL ForeignKeyConstraint at DDL definition time. +by SQLAlchemy in conjunction with the MySQL backend. This argument is +silently ignored by MySQL, but in addition has the effect of ON UPDATE and ON +DELETE options also being ignored by the backend. Therefore MATCH should +never be used with the MySQL backend; as is the case with DEFERRABLE and +INITIALLY, custom compilation rules can be used to correct a MySQL +ForeignKeyConstraint at DDL definition time. -.. versionadded:: 0.9.0 - the MySQL backend will raise a :class:`.CompileError` - when the ``match`` keyword is used with :class:`.ForeignKeyConstraint` - or :class:`.ForeignKey`. +.. versionadded:: 0.9.0 - the MySQL backend will raise a + :class:`.CompileError` when the ``match`` keyword is used with + :class:`.ForeignKeyConstraint` or :class:`.ForeignKey`. Reflection of Foreign Key Constraints ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -333,6 +446,110 @@ def process(element, compiler, **kw): :ref:`mysql_storage_engines` +.. _mysql_unique_constraints: + +MySQL Unique Constraints and Reflection +--------------------------------------- + +SQLAlchemy supports both the :class:`.Index` construct with the +flag ``unique=True``, indicating a UNIQUE index, as well as the +:class:`.UniqueConstraint` construct, representing a UNIQUE constraint. +Both objects/syntaxes are supported by MySQL when emitting DDL to create +these constraints. However, MySQL does not have a unique constraint +construct that is separate from a unique index; that is, the "UNIQUE" +constraint on MySQL is equivalent to creating a "UNIQUE INDEX". + +When reflecting these constructs, the :meth:`.Inspector.get_indexes` +and the :meth:`.Inspector.get_unique_constraints` methods will **both** +return an entry for a UNIQUE index in MySQL. However, when performing +full table reflection using ``Table(..., autoload=True)``, +the :class:`.UniqueConstraint` construct is +**not** part of the fully reflected :class:`.Table` construct under any +circumstances; this construct is always represented by a :class:`.Index` +with the ``unique=True`` setting present in the :attr:`.Table.indexes` +collection. + + +.. _mysql_timestamp_null: + +TIMESTAMP Columns and NULL +-------------------------- + +MySQL historically enforces that a column which specifies the +TIMESTAMP datatype implicitly includes a default value of +CURRENT_TIMESTAMP, even though this is not stated, and additionally +sets the column as NOT NULL, the opposite behavior vs. 
that of all +other datatypes:: + + mysql> CREATE TABLE ts_test ( + -> a INTEGER, + -> b INTEGER NOT NULL, + -> c TIMESTAMP, + -> d TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + -> e TIMESTAMP NULL); + Query OK, 0 rows affected (0.03 sec) + + mysql> SHOW CREATE TABLE ts_test; + +---------+----------------------------------------------------- + | Table | Create Table + +---------+----------------------------------------------------- + | ts_test | CREATE TABLE `ts_test` ( + `a` int(11) DEFAULT NULL, + `b` int(11) NOT NULL, + `c` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `d` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `e` timestamp NULL DEFAULT NULL + ) ENGINE=MyISAM DEFAULT CHARSET=latin1 + +Above, we see that an INTEGER column defaults to NULL, unless it is specified +with NOT NULL. But when the column is of type TIMESTAMP, an implicit +default of CURRENT_TIMESTAMP is generated which also coerces the column +to be a NOT NULL, even though we did not specify it as such. + +This behavior of MySQL can be changed on the MySQL side using the +`explicit_defaults_for_timestamp +`_ configuration flag introduced in +MySQL 5.6. With this server setting enabled, TIMESTAMP columns behave like +any other datatype on the MySQL side with regards to defaults and nullability. + +However, to accommodate the vast majority of MySQL databases that do not +specify this new flag, SQLAlchemy emits the "NULL" specifier explicitly with +any TIMESTAMP column that does not specify ``nullable=False``. In order +to accommodate newer databases that specify ``explicit_defaults_for_timestamp``, +SQLAlchemy also emits NOT NULL for TIMESTAMP columns that do specify +``nullable=False``. The following example illustrates:: + + from sqlalchemy import MetaData, Integer, Table, Column, text + from sqlalchemy.dialects.mysql import TIMESTAMP + + m = MetaData() + t = Table('ts_test', m, + Column('a', Integer), + Column('b', Integer, nullable=False), + Column('c', TIMESTAMP), + Column('d', TIMESTAMP, nullable=False) + ) + + + from sqlalchemy import create_engine + e = create_engine("mysql://scott:tiger@localhost/test", echo=True) + m.create_all(e) + +output:: + + CREATE TABLE ts_test ( + a INTEGER, + b INTEGER NOT NULL, + c TIMESTAMP NULL, + d TIMESTAMP NOT NULL + ) + +.. versionchanged:: 1.0.0 - SQLAlchemy now renders NULL or NOT NULL in all + cases for TIMESTAMP columns, to accommodate + ``explicit_defaults_for_timestamp``. Prior to this version, it will + not render "NOT NULL" for a TIMESTAMP column that is ``nullable=False``. + """ import datetime @@ -349,7 +566,7 @@ def process(element, compiler, **kw): from ... 
import types as sqltypes from ...util import topological from ...types import DATE, BOOLEAN, \ - BLOB, BINARY, VARBINARY + BLOB, BINARY, VARBINARY RESERVED_WORDS = set( ['accessible', 'add', 'all', 'alter', 'analyze', 'and', 'as', 'asc', @@ -363,15 +580,15 @@ def process(element, compiler, **kw): 'deterministic', 'distinct', 'distinctrow', 'div', 'double', 'drop', 'dual', 'each', 'else', 'elseif', 'enclosed', 'escaped', 'exists', 'exit', 'explain', 'false', 'fetch', 'float', 'float4', 'float8', - 'for', 'force', 'foreign', 'from', 'fulltext', 'grant', 'group', 'having', - 'high_priority', 'hour_microsecond', 'hour_minute', 'hour_second', 'if', - 'ignore', 'in', 'index', 'infile', 'inner', 'inout', 'insensitive', - 'insert', 'int', 'int1', 'int2', 'int3', 'int4', 'int8', 'integer', - 'interval', 'into', 'is', 'iterate', 'join', 'key', 'keys', 'kill', - 'leading', 'leave', 'left', 'like', 'limit', 'linear', 'lines', 'load', - 'localtime', 'localtimestamp', 'lock', 'long', 'longblob', 'longtext', - 'loop', 'low_priority', 'master_ssl_verify_server_cert', 'match', - 'mediumblob', 'mediumint', 'mediumtext', 'middleint', + 'for', 'force', 'foreign', 'from', 'fulltext', 'grant', 'group', + 'having', 'high_priority', 'hour_microsecond', 'hour_minute', + 'hour_second', 'if', 'ignore', 'in', 'index', 'infile', 'inner', 'inout', + 'insensitive', 'insert', 'int', 'int1', 'int2', 'int3', 'int4', 'int8', + 'integer', 'interval', 'into', 'is', 'iterate', 'join', 'key', 'keys', + 'kill', 'leading', 'leave', 'left', 'like', 'limit', 'linear', 'lines', + 'load', 'localtime', 'localtimestamp', 'lock', 'long', 'longblob', + 'longtext', 'loop', 'low_priority', 'master_ssl_verify_server_cert', + 'match', 'mediumblob', 'mediumint', 'mediumtext', 'middleint', 'minute_microsecond', 'minute_second', 'mod', 'modifies', 'natural', 'not', 'no_write_to_binlog', 'null', 'numeric', 'on', 'optimize', 'option', 'optionally', 'or', 'order', 'out', 'outer', 'outfile', @@ -396,11 +613,13 @@ def process(element, compiler, **kw): 'read_only', 'read_write', # 5.1 'general', 'ignore_server_ids', 'master_heartbeat_period', 'maxvalue', - 'resignal', 'signal', 'slow', # 5.5 + 'resignal', 'signal', 'slow', # 5.5 - 'get', 'io_after_gtids', 'io_before_gtids', 'master_bind', 'one_shot', + 'get', 'io_after_gtids', 'io_before_gtids', 'master_bind', 'one_shot', 'partition', 'sql_after_gtids', 'sql_before_gtids', # 5.6 + 'generated', 'optimizer_costs', 'stored', 'virtual', # 5.7 + ]) AUTOCOMMIT_RE = re.compile( @@ -426,7 +645,8 @@ def __init__(self, unsigned=False, zerofill=False, **kw): def __repr__(self): return util.generic_repr(self, - to_inspect=[_NumericType, sqltypes.Numeric]) + to_inspect=[_NumericType, sqltypes.Numeric]) + class _FloatType(_NumericType, sqltypes.Float): def __init__(self, precision=None, scale=None, asdecimal=True, **kw): @@ -434,16 +654,19 @@ def __init__(self, precision=None, scale=None, asdecimal=True, **kw): ( (precision is None and scale is not None) or (precision is not None and scale is None) - ): + ): raise exc.ArgumentError( "You must specify both precision and scale or omit " "both altogether.") - super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw) + super(_FloatType, self).__init__( + precision=precision, asdecimal=asdecimal, **kw) self.scale = scale def __repr__(self): - return util.generic_repr(self, - to_inspect=[_FloatType, _NumericType, sqltypes.Float]) + return util.generic_repr(self, to_inspect=[_FloatType, + _NumericType, + sqltypes.Float]) + class 
_IntegerType(_NumericType, sqltypes.Integer): def __init__(self, display_width=None, **kw): @@ -451,8 +674,10 @@ def __init__(self, display_width=None, **kw): super(_IntegerType, self).__init__(**kw) def __repr__(self): - return util.generic_repr(self, - to_inspect=[_IntegerType, _NumericType, sqltypes.Integer]) + return util.generic_repr(self, to_inspect=[_IntegerType, + _NumericType, + sqltypes.Integer]) + class _StringType(sqltypes.String): """Base for MySQL string types.""" @@ -473,7 +698,16 @@ def __init__(self, charset=None, collation=None, def __repr__(self): return util.generic_repr(self, - to_inspect=[_StringType, sqltypes.String]) + to_inspect=[_StringType, sqltypes.String]) + + +class _MatchType(sqltypes.Float, sqltypes.MatchType): + def __init__(self, **kw): + # TODO: float arguments? + sqltypes.Float.__init__(self) + sqltypes.MatchType.__init__(self) + + class NUMERIC(_NumericType, sqltypes.NUMERIC): """MySQL NUMERIC type.""" @@ -497,7 +731,7 @@ def __init__(self, precision=None, scale=None, asdecimal=True, **kw): """ super(NUMERIC, self).__init__(precision=precision, - scale=scale, asdecimal=asdecimal, **kw) + scale=scale, asdecimal=asdecimal, **kw) class DECIMAL(_NumericType, sqltypes.DECIMAL): @@ -536,10 +770,10 @@ def __init__(self, precision=None, scale=None, asdecimal=True, **kw): .. note:: The :class:`.DOUBLE` type by default converts from float - to Decimal, using a truncation that defaults to 10 digits. Specify - either ``scale=n`` or ``decimal_return_scale=n`` in order to change - this scale, or ``asdecimal=False`` to return values directly as - Python floating points. + to Decimal, using a truncation that defaults to 10 digits. + Specify either ``scale=n`` or ``decimal_return_scale=n`` in order + to change this scale, or ``asdecimal=False`` to return values + directly as Python floating points. :param precision: Total digits in this number. If scale and precision are both None, values are stored to limits allowed by the server. @@ -569,10 +803,10 @@ def __init__(self, precision=None, scale=None, asdecimal=True, **kw): .. note:: The :class:`.REAL` type by default converts from float - to Decimal, using a truncation that defaults to 10 digits. Specify - either ``scale=n`` or ``decimal_return_scale=n`` in order to change - this scale, or ``asdecimal=False`` to return values directly as - Python floating points. + to Decimal, using a truncation that defaults to 10 digits. + Specify either ``scale=n`` or ``decimal_return_scale=n`` in order + to change this scale, or ``asdecimal=False`` to return values + directly as Python floating points. :param precision: Total digits in this number. If scale and precision are both None, values are stored to limits allowed by the server. @@ -727,9 +961,9 @@ def __init__(self, display_width=None, **kw): class BIT(sqltypes.TypeEngine): """MySQL BIT type. - This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater for - MyISAM, MEMORY, InnoDB and BDB. For older versions, use a MSTinyInteger() - type. + This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater + for MyISAM, MEMORY, InnoDB and BDB. For older versions, use a + MSTinyInteger() type. 
""" @@ -754,7 +988,9 @@ def result_processor(self, dialect, coltype): def process(value): if value is not None: v = 0 - for i in map(ord, value): + for i in value: + if not isinstance(i, int): + i = ord(i) # convert byte to int on Python 2 v = v << 8 | i return v return value @@ -1074,11 +1310,12 @@ def _adapt_string_for_cast(self, type_): ascii=type_.ascii, binary=type_.binary, unicode=type_.unicode, - national=False # not supported in CAST + national=False # not supported in CAST ) else: return CHAR(length=type_.length) + class NVARCHAR(_StringType, sqltypes.NVARCHAR): """MySQL NVARCHAR type. @@ -1148,6 +1385,7 @@ class LONGBLOB(sqltypes._Binary): __visit_name__ = 'LONGBLOB' + class _EnumeratedValues(_StringType): def _init_values(self, values, kw): self.quoting = kw.pop('quoting', 'auto') @@ -1190,6 +1428,7 @@ def _strip_values(cls, values): strip_values.append(a) return strip_values + class ENUM(sqltypes.Enum, _EnumeratedValues): """MySQL ENUM type.""" @@ -1252,12 +1491,13 @@ def __init__(self, *enums, **kw): kw.pop('quote', None) kw.pop('native_enum', None) kw.pop('inherit_schema', None) + kw.pop('_create_events', None) _StringType.__init__(self, length=length, **kw) sqltypes.Enum.__init__(self, *values) def __repr__(self): - return util.generic_repr(self, - to_inspect=[ENUM, _StringType, sqltypes.Enum]) + return util.generic_repr( + self, to_inspect=[ENUM, _StringType, sqltypes.Enum]) def bind_processor(self, dialect): super_convert = super(ENUM, self).bind_processor(dialect) @@ -1265,7 +1505,7 @@ def bind_processor(self, dialect): def process(value): if self.strict and value is not None and value not in self.enums: raise exc.InvalidRequestError('"%s" not a valid value for ' - 'this enum' % value) + 'this enum' % value) if super_convert: return super_convert(value) else: @@ -1290,32 +1530,28 @@ def __init__(self, *values, **kw): Column('myset', SET("foo", "bar", "baz")) - :param values: The range of valid values for this SET. Values will be - quoted when generating the schema according to the quoting flag (see - below). - .. versionchanged:: 0.9.0 quoting is applied automatically to - :class:`.mysql.SET` in the same way as for :class:`.mysql.ENUM`. + The list of potential values is required in the case that this + set will be used to generate DDL for a table, or if the + :paramref:`.SET.retrieve_as_bitwise` flag is set to True. - :param charset: Optional, a column-level character set for this string - value. Takes precedence to 'ascii' or 'unicode' short-hand. + :param values: The range of valid values for this SET. - :param collation: Optional, a column-level collation for this string - value. Takes precedence to 'binary' short-hand. + :param convert_unicode: Same flag as that of + :paramref:`.String.convert_unicode`. - :param ascii: Defaults to False: short-hand for the ``latin1`` - character set, generates ASCII in schema. + :param collation: same as that of :paramref:`.String.collation` - :param unicode: Defaults to False: short-hand for the ``ucs2`` - character set, generates UNICODE in schema. + :param charset: same as that of :paramref:`.VARCHAR.charset`. - :param binary: Defaults to False: short-hand, pick the binary - collation type that matches the column's character set. Generates - BINARY in schema. This does not affect the type of data stored, - only the collation of character data. + :param ascii: same as that of :paramref:`.VARCHAR.ascii`. - :param quoting: Defaults to 'auto': automatically determine enum value - quoting. 
If all enum values are surrounded by the same quoting + :param unicode: same as that of :paramref:`.VARCHAR.unicode`. + + :param binary: same as that of :paramref:`.VARCHAR.binary`. + + :param quoting: Defaults to 'auto': automatically determine set value + quoting. If all values are surrounded by the same quoting character, then use 'quoted' mode. Otherwise, use 'unquoted' mode. 'quoted': values in enums are already quoted, they will be used @@ -1330,49 +1566,117 @@ def __init__(self, *values, **kw): .. versionadded:: 0.9.0 + :param retrieve_as_bitwise: if True, the data for the set type will be + persisted and selected using an integer value, where a set is coerced + into a bitwise mask for persistence. MySQL allows this mode which + has the advantage of being able to store values unambiguously, + such as the blank string ``''``. The datatype will appear + as the expression ``col + 0`` in a SELECT statement, so that the + value is coerced into an integer value in result sets. + This flag is required if one wishes + to persist a set that can store the blank string ``''`` as a value. + + .. warning:: + + When using :paramref:`.mysql.SET.retrieve_as_bitwise`, it is + essential that the list of set values is expressed in the + **exact same order** as exists on the MySQL database. + + .. versionadded:: 1.0.0 + + """ + self.retrieve_as_bitwise = kw.pop('retrieve_as_bitwise', False) values, length = self._init_values(values, kw) self.values = tuple(values) - + if not self.retrieve_as_bitwise and '' in values: + raise exc.ArgumentError( + "Can't use the blank value '' in a SET without " + "setting retrieve_as_bitwise=True") + if self.retrieve_as_bitwise: + self._bitmap = dict( + (value, 2 ** idx) + for idx, value in enumerate(self.values) + ) + self._bitmap.update( + (2 ** idx, value) + for idx, value in enumerate(self.values) + ) kw.setdefault('length', length) super(SET, self).__init__(**kw) + def column_expression(self, colexpr): + if self.retrieve_as_bitwise: + return colexpr + 0 + else: + return colexpr + def result_processor(self, dialect, coltype): - def process(value): - # The good news: - # No ',' quoting issues- commas aren't allowed in SET values - # The bad news: - # Plenty of driver inconsistencies here. 
- if isinstance(value, set): - # ..some versions convert '' to an empty set - if not value: - value.add('') - return value - # ...and some versions return strings - if value is not None: - return set(value.split(',')) - else: - return value + if self.retrieve_as_bitwise: + def process(value): + if value is not None: + value = int(value) + + return set( + util.map_bits(self._bitmap.__getitem__, value) + ) + else: + return None + else: + super_convert = super(SET, self).result_processor(dialect, coltype) + + def process(value): + if isinstance(value, util.string_types): + # MySQLdb returns a string, let's parse + if super_convert: + value = super_convert(value) + return set(re.findall(r'[^,]+', value)) + else: + # mysql-connector-python does a naive + # split(",") which throws in an empty string + if value is not None: + value.discard('') + return value return process def bind_processor(self, dialect): super_convert = super(SET, self).bind_processor(dialect) + if self.retrieve_as_bitwise: + def process(value): + if value is None: + return None + elif isinstance(value, util.int_types + util.string_types): + if super_convert: + return super_convert(value) + else: + return value + else: + int_value = 0 + for v in value: + int_value |= self._bitmap[v] + return int_value + else: - def process(value): - if value is None or isinstance(value, util.int_types + util.string_types): - pass - else: - if None in value: - value = set(value) - value.remove(None) - value.add('') - value = ','.join(value) - if super_convert: - return super_convert(value) - else: - return value + def process(value): + # accept strings and int (actually bitflag) values directly + if value is not None and not isinstance( + value, util.int_types + util.string_types): + value = ",".join(value) + + if super_convert: + return super_convert(value) + else: + return value return process + def adapt(self, impltype, **kw): + kw['retrieve_as_bitwise'] = self.retrieve_as_bitwise + return util.constructor_copy( + self, impltype, + *self.values, + **kw + ) + # old names MSTime = TIME MSSet = SET @@ -1413,6 +1717,7 @@ def process(value): sqltypes.Float: FLOAT, sqltypes.Time: TIME, sqltypes.Enum: ENUM, + sqltypes.MatchType: _MatchType } # Everything 3.23 through 5.1 excepting OpenGIS types. 
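A minimal usage sketch of the new ``retrieve_as_bitwise`` flag documented above (the table and column names here are illustrative only)::

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.dialects.mysql import SET

    metadata = MetaData()

    # persisting the set as an integer bitmask lets the blank value ''
    # be stored and round-tripped unambiguously
    example = Table(
        'set_bitwise_example', metadata,
        Column('id', Integer, primary_key=True),
        Column('flags', SET('', 'read', 'write', retrieve_as_bitwise=True)),
    )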
@@ -1479,18 +1784,21 @@ def visit_sysdate_func(self, fn, **kw): def visit_concat_op_binary(self, binary, operator, **kw): return "concat(%s, %s)" % (self.process(binary.left), - self.process(binary.right)) + self.process(binary.right)) def visit_match_op_binary(self, binary, operator, **kw): return "MATCH (%s) AGAINST (%s IN BOOLEAN MODE)" % \ - (self.process(binary.left), self.process(binary.right)) + (self.process(binary.left), self.process(binary.right)) def get_from_hint_text(self, table, text): return text - def visit_typeclause(self, typeclause): - type_ = typeclause.type.dialect_impl(self.dialect) - if isinstance(type_, sqltypes.Integer): + def visit_typeclause(self, typeclause, type_=None): + if type_ is None: + type_ = typeclause.type.dialect_impl(self.dialect) + if isinstance(type_, sqltypes.TypeDecorator): + return self.visit_typeclause(typeclause, type_.impl) + elif isinstance(type_, sqltypes.Integer): if getattr(type_, 'unsigned', False): return 'UNSIGNED INTEGER' else: @@ -1498,26 +1806,34 @@ def visit_typeclause(self, typeclause): elif isinstance(type_, sqltypes.TIMESTAMP): return 'DATETIME' elif isinstance(type_, (sqltypes.DECIMAL, sqltypes.DateTime, - sqltypes.Date, sqltypes.Time)): + sqltypes.Date, sqltypes.Time)): return self.dialect.type_compiler.process(type_) - elif isinstance(type_, sqltypes.String) and not isinstance(type_, (ENUM, SET)): + elif isinstance(type_, sqltypes.String) \ + and not isinstance(type_, (ENUM, SET)): adapted = CHAR._adapt_string_for_cast(type_) return self.dialect.type_compiler.process(adapted) elif isinstance(type_, sqltypes._Binary): return 'BINARY' elif isinstance(type_, sqltypes.NUMERIC): return self.dialect.type_compiler.process( - type_).replace('NUMERIC', 'DECIMAL') + type_).replace('NUMERIC', 'DECIMAL') else: return None def visit_cast(self, cast, **kwargs): # No cast until 4, no decimals until 5. if not self.dialect._supports_cast: + util.warn( + "Current MySQL version does not support " + "CAST; the CAST will be skipped.") return self.process(cast.clause.self_group()) type_ = self.process(cast.typeclause) if type_ is None: + util.warn( + "Datatype %s does not support CAST on MySQL; " + "the CAST will be skipped." % + self.dialect.type_compiler.process(cast.typeclause.type)) return self.process(cast.clause.self_group()) return 'CAST(%s AS %s)' % (self.process(cast.clause), type_) @@ -1528,7 +1844,15 @@ def render_literal_value(self, value, type_): value = value.replace('\\', '\\\\') return value - def get_select_precolumns(self, select): + # override native_boolean=False behavior here, as + # MySQL still supports native boolean + def visit_true(self, element, **kw): + return "true" + + def visit_false(self, element, **kw): + return "false" + + def get_select_precolumns(self, select, **kw): """Add special MySQL keywords in place of DISTINCT. .. note:: @@ -1553,13 +1877,13 @@ def visit_join(self, join, asfrom=False, **kwargs): " ON ", self.process(join.onclause, **kwargs))) - def for_update_clause(self, select): + def for_update_clause(self, select, **kw): if select._for_update_arg.read: return " LOCK IN SHARE MODE" else: return " FOR UPDATE" - def limit_clause(self, select): + def limit_clause(self, select, **kw): # MySQL supports: # LIMIT # LIMIT , @@ -1568,15 +1892,16 @@ def limit_clause(self, select): # The latter is more readable for offsets but we're stuck with the # former until we can refine dialects by server revision. 
- limit, offset = select._limit, select._offset + limit_clause, offset_clause = select._limit_clause, \ + select._offset_clause - if (limit, offset) == (None, None): + if limit_clause is None and offset_clause is None: return '' - elif offset is not None: + elif offset_clause is not None: # As suggested by the MySQL docs, need to apply an # artificial limit if one wasn't provided # http://dev.mysql.com/doc/refman/5.0/en/select.html - if limit is None: + if limit_clause is None: # hardwire the upper limit. Currently # needed by OurSQL with Python 3 # (https://bugs.launchpad.net/oursql/+bug/686232), @@ -1584,15 +1909,15 @@ def limit_clause(self, select): # bound as part of MySQL's "syntax" for OFFSET with # no LIMIT return ' \n LIMIT %s, %s' % ( - self.process(sql.literal(offset)), - "18446744073709551615") + self.process(offset_clause, **kw), + "18446744073709551615") else: return ' \n LIMIT %s, %s' % ( - self.process(sql.literal(offset)), - self.process(sql.literal(limit))) + self.process(offset_clause, **kw), + self.process(limit_clause, **kw)) else: # No offset provided, so just use the limit - return ' \n LIMIT %s' % (self.process(sql.literal(limit)),) + return ' \n LIMIT %s' % (self.process(limit_clause, **kw),) def update_limit_clause(self, update_stmt): limit = update_stmt.kwargs.get('%s_limit' % self.dialect.name, None) @@ -1601,12 +1926,13 @@ def update_limit_clause(self, update_stmt): else: return None - def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): + def update_tables_clause(self, update_stmt, from_table, + extra_froms, **kw): return ', '.join(t._compiler_dispatch(self, asfrom=True, **kw) - for t in [from_table] + list(extra_froms)) + for t in [from_table] + list(extra_froms)) def update_from_clause(self, update_stmt, from_table, - extra_froms, from_hints, **kw): + extra_froms, from_hints, **kw): return None @@ -1616,14 +1942,15 @@ def update_from_clause(self, update_stmt, from_table, # creation of foreign key constraints fails." class MySQLDDLCompiler(compiler.DDLCompiler): - def create_table_constraints(self, table): + def create_table_constraints(self, table, **kw): """Get table constraints.""" constraint_string = super( - MySQLDDLCompiler, self).create_table_constraints(table) + MySQLDDLCompiler, self).create_table_constraints(table, **kw) # why self.dialect.name and not 'mysql'? 
because of drizzle is_innodb = 'engine' in table.dialect_options[self.dialect.name] and \ - table.dialect_options[self.dialect.name]['engine'].lower() == 'innodb' + table.dialect_options[self.dialect.name][ + 'engine'].lower() == 'innodb' auto_inc_column = table._autoincrement_column @@ -1633,34 +1960,40 @@ def create_table_constraints(self, table): if constraint_string: constraint_string += ", \n\t" constraint_string += "KEY %s (%s)" % ( - self.preparer.quote( - "idx_autoinc_%s" % auto_inc_column.name - ), - self.preparer.format_column(auto_inc_column) - ) + self.preparer.quote( + "idx_autoinc_%s" % auto_inc_column.name + ), + self.preparer.format_column(auto_inc_column) + ) return constraint_string def get_column_specification(self, column, **kw): """Builds column DDL.""" - colspec = [self.preparer.format_column(column), - self.dialect.type_compiler.process(column.type) - ] - - default = self.get_column_default_string(column) - if default is not None: - colspec.append('DEFAULT ' + default) + colspec = [ + self.preparer.format_column(column), + self.dialect.type_compiler.process( + column.type, type_expression=column) + ] is_timestamp = isinstance(column.type, sqltypes.TIMESTAMP) - if not column.nullable and not is_timestamp: + + if not column.nullable: colspec.append('NOT NULL') - elif column.nullable and is_timestamp and default is None: + # see: http://docs.sqlalchemy.org/en/latest/dialects/ + # mysql.html#mysql_timestamp_null + elif column.nullable and is_timestamp: colspec.append('NULL') - if column is column.table._autoincrement_column and \ - column.server_default is None: + default = self.get_column_default_string(column) + if default is not None: + colspec.append('DEFAULT ' + default) + + if column.table is not None \ + and column is column.table._autoincrement_column and \ + column.server_default is None: colspec.append('AUTO_INCREMENT') return ' '.join(colspec) @@ -1696,7 +2029,8 @@ def post_create_table(self, table): joiner = '=' if opt in ('TABLESPACE', 'DEFAULT CHARACTER SET', - 'CHARACTER SET', 'COLLATE', 'PARTITION BY', 'PARTITIONS'): + 'CHARACTER SET', 'COLLATE', + 'PARTITION BY', 'PARTITIONS'): joiner = ' ' table_opts.append(joiner.join((opt, arg))) @@ -1708,8 +2042,8 @@ def visit_create_index(self, create): preparer = self.preparer table = preparer.format_table(index.table) columns = [self.sql_compiler.process(expr, include_table=False, - literal_binds=True) - for expr in index.expressions] + literal_binds=True) + for expr in index.expressions] name = self._prepared_index_name(index) @@ -1722,12 +2056,17 @@ def visit_create_index(self, create): if length is not None: if isinstance(length, dict): - # length value can be a (column_name --> integer value) mapping - # specifying the prefix length for each column of the index + # length value can be a (column_name --> integer value) + # mapping specifying the prefix length for each column of the + # index columns = ', '.join( - ('%s(%d)' % (col, length[col]) - if col in length else '%s' % col) - for col in columns + '%s(%d)' % (expr, length[col.name]) if col.name in length + else + ( + '%s(%d)' % (expr, length[expr]) if expr in length + else '%s' % expr + ) + for col, expr in zip(index.expressions, columns) ) else: # or can be an integer value specifying the same @@ -1758,9 +2097,9 @@ def visit_drop_index(self, drop): index = drop.element return "\nDROP INDEX %s ON %s" % ( - self._prepared_index_name(index, - include_schema=False), - self.preparer.format_table(index.table)) + self._prepared_index_name(index, + 
include_schema=False), + self.preparer.format_table(index.table)) def visit_drop_constraint(self, drop): constraint = drop.element @@ -1777,16 +2116,17 @@ def visit_drop_constraint(self, drop): qual = "" const = self.preparer.format_constraint(constraint) return "ALTER TABLE %s DROP %s%s" % \ - (self.preparer.format_table(constraint.table), - qual, const) + (self.preparer.format_table(constraint.table), + qual, const) def define_constraint_match(self, constraint): if constraint.match is not None: raise exc.CompileError( - "MySQL ignores the 'MATCH' keyword while at the same time " - "causes ON UPDATE/ON DELETE clauses to be ignored.") + "MySQL ignores the 'MATCH' keyword while at the same time " + "causes ON UPDATE/ON DELETE clauses to be ignored.") return "" + class MySQLTypeCompiler(compiler.GenericTypeCompiler): def _extend_numeric(self, type_, spec): "Extend a numeric-type declaration with MySQL specific extensions." @@ -1835,210 +2175,213 @@ def attr(name): def _mysql_type(self, type_): return isinstance(type_, (_StringType, _NumericType)) - def visit_NUMERIC(self, type_): + def visit_NUMERIC(self, type_, **kw): if type_.precision is None: return self._extend_numeric(type_, "NUMERIC") elif type_.scale is None: return self._extend_numeric(type_, - "NUMERIC(%(precision)s)" % - {'precision': type_.precision}) + "NUMERIC(%(precision)s)" % + {'precision': type_.precision}) else: return self._extend_numeric(type_, - "NUMERIC(%(precision)s, %(scale)s)" % - {'precision': type_.precision, - 'scale': type_.scale}) + "NUMERIC(%(precision)s, %(scale)s)" % + {'precision': type_.precision, + 'scale': type_.scale}) - def visit_DECIMAL(self, type_): + def visit_DECIMAL(self, type_, **kw): if type_.precision is None: return self._extend_numeric(type_, "DECIMAL") elif type_.scale is None: return self._extend_numeric(type_, - "DECIMAL(%(precision)s)" % - {'precision': type_.precision}) + "DECIMAL(%(precision)s)" % + {'precision': type_.precision}) else: return self._extend_numeric(type_, - "DECIMAL(%(precision)s, %(scale)s)" % - {'precision': type_.precision, - 'scale': type_.scale}) + "DECIMAL(%(precision)s, %(scale)s)" % + {'precision': type_.precision, + 'scale': type_.scale}) - def visit_DOUBLE(self, type_): + def visit_DOUBLE(self, type_, **kw): if type_.precision is not None and type_.scale is not None: return self._extend_numeric(type_, - "DOUBLE(%(precision)s, %(scale)s)" % - {'precision': type_.precision, - 'scale': type_.scale}) + "DOUBLE(%(precision)s, %(scale)s)" % + {'precision': type_.precision, + 'scale': type_.scale}) else: return self._extend_numeric(type_, 'DOUBLE') - def visit_REAL(self, type_): + def visit_REAL(self, type_, **kw): if type_.precision is not None and type_.scale is not None: return self._extend_numeric(type_, - "REAL(%(precision)s, %(scale)s)" % - {'precision': type_.precision, - 'scale': type_.scale}) + "REAL(%(precision)s, %(scale)s)" % + {'precision': type_.precision, + 'scale': type_.scale}) else: return self._extend_numeric(type_, 'REAL') - def visit_FLOAT(self, type_): + def visit_FLOAT(self, type_, **kw): if self._mysql_type(type_) and \ - type_.scale is not None and \ - type_.precision is not None: - return self._extend_numeric(type_, - "FLOAT(%s, %s)" % (type_.precision, type_.scale)) + type_.scale is not None and \ + type_.precision is not None: + return self._extend_numeric( + type_, "FLOAT(%s, %s)" % (type_.precision, type_.scale)) elif type_.precision is not None: return self._extend_numeric(type_, - "FLOAT(%s)" % (type_.precision,)) + "FLOAT(%s)" % 
(type_.precision,)) else: return self._extend_numeric(type_, "FLOAT") - def visit_INTEGER(self, type_): + def visit_INTEGER(self, type_, **kw): if self._mysql_type(type_) and type_.display_width is not None: - return self._extend_numeric(type_, - "INTEGER(%(display_width)s)" % - {'display_width': type_.display_width}) + return self._extend_numeric( + type_, "INTEGER(%(display_width)s)" % + {'display_width': type_.display_width}) else: return self._extend_numeric(type_, "INTEGER") - def visit_BIGINT(self, type_): + def visit_BIGINT(self, type_, **kw): if self._mysql_type(type_) and type_.display_width is not None: - return self._extend_numeric(type_, - "BIGINT(%(display_width)s)" % - {'display_width': type_.display_width}) + return self._extend_numeric( + type_, "BIGINT(%(display_width)s)" % + {'display_width': type_.display_width}) else: return self._extend_numeric(type_, "BIGINT") - def visit_MEDIUMINT(self, type_): + def visit_MEDIUMINT(self, type_, **kw): if self._mysql_type(type_) and type_.display_width is not None: - return self._extend_numeric(type_, - "MEDIUMINT(%(display_width)s)" % - {'display_width': type_.display_width}) + return self._extend_numeric( + type_, "MEDIUMINT(%(display_width)s)" % + {'display_width': type_.display_width}) else: return self._extend_numeric(type_, "MEDIUMINT") - def visit_TINYINT(self, type_): + def visit_TINYINT(self, type_, **kw): if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric(type_, "TINYINT(%s)" % type_.display_width) else: return self._extend_numeric(type_, "TINYINT") - def visit_SMALLINT(self, type_): + def visit_SMALLINT(self, type_, **kw): if self._mysql_type(type_) and type_.display_width is not None: return self._extend_numeric(type_, - "SMALLINT(%(display_width)s)" % - {'display_width': type_.display_width} - ) + "SMALLINT(%(display_width)s)" % + {'display_width': type_.display_width} + ) else: return self._extend_numeric(type_, "SMALLINT") - def visit_BIT(self, type_): + def visit_BIT(self, type_, **kw): if type_.length is not None: return "BIT(%s)" % type_.length else: return "BIT" - def visit_DATETIME(self, type_): + def visit_DATETIME(self, type_, **kw): if getattr(type_, 'fsp', None): return "DATETIME(%d)" % type_.fsp else: return "DATETIME" - def visit_DATE(self, type_): + def visit_DATE(self, type_, **kw): return "DATE" - def visit_TIME(self, type_): + def visit_TIME(self, type_, **kw): if getattr(type_, 'fsp', None): return "TIME(%d)" % type_.fsp else: return "TIME" - def visit_TIMESTAMP(self, type_): + def visit_TIMESTAMP(self, type_, **kw): if getattr(type_, 'fsp', None): return "TIMESTAMP(%d)" % type_.fsp else: return "TIMESTAMP" - def visit_YEAR(self, type_): + def visit_YEAR(self, type_, **kw): if type_.display_width is None: return "YEAR" else: return "YEAR(%s)" % type_.display_width - def visit_TEXT(self, type_): + def visit_TEXT(self, type_, **kw): if type_.length: return self._extend_string(type_, {}, "TEXT(%d)" % type_.length) else: return self._extend_string(type_, {}, "TEXT") - def visit_TINYTEXT(self, type_): + def visit_TINYTEXT(self, type_, **kw): return self._extend_string(type_, {}, "TINYTEXT") - def visit_MEDIUMTEXT(self, type_): + def visit_MEDIUMTEXT(self, type_, **kw): return self._extend_string(type_, {}, "MEDIUMTEXT") - def visit_LONGTEXT(self, type_): + def visit_LONGTEXT(self, type_, **kw): return self._extend_string(type_, {}, "LONGTEXT") - def visit_VARCHAR(self, type_): + def visit_VARCHAR(self, type_, **kw): if type_.length: - return self._extend_string(type_, 
{}, "VARCHAR(%d)" % type_.length) + return self._extend_string( + type_, {}, "VARCHAR(%d)" % type_.length) else: raise exc.CompileError( - "VARCHAR requires a length on dialect %s" % - self.dialect.name) + "VARCHAR requires a length on dialect %s" % + self.dialect.name) - def visit_CHAR(self, type_): + def visit_CHAR(self, type_, **kw): if type_.length: return self._extend_string(type_, {}, "CHAR(%(length)s)" % - {'length': type_.length}) + {'length': type_.length}) else: return self._extend_string(type_, {}, "CHAR") - def visit_NVARCHAR(self, type_): + def visit_NVARCHAR(self, type_, **kw): # We'll actually generate the equiv. "NATIONAL VARCHAR" instead # of "NVARCHAR". if type_.length: - return self._extend_string(type_, {'national': True}, - "VARCHAR(%(length)s)" % {'length': type_.length}) + return self._extend_string( + type_, {'national': True}, + "VARCHAR(%(length)s)" % {'length': type_.length}) else: raise exc.CompileError( - "NVARCHAR requires a length on dialect %s" % - self.dialect.name) + "NVARCHAR requires a length on dialect %s" % + self.dialect.name) - def visit_NCHAR(self, type_): + def visit_NCHAR(self, type_, **kw): # We'll actually generate the equiv. # "NATIONAL CHAR" instead of "NCHAR". if type_.length: - return self._extend_string(type_, {'national': True}, - "CHAR(%(length)s)" % {'length': type_.length}) + return self._extend_string( + type_, {'national': True}, + "CHAR(%(length)s)" % {'length': type_.length}) else: return self._extend_string(type_, {'national': True}, "CHAR") - def visit_VARBINARY(self, type_): + def visit_VARBINARY(self, type_, **kw): return "VARBINARY(%d)" % type_.length - def visit_large_binary(self, type_): + def visit_large_binary(self, type_, **kw): return self.visit_BLOB(type_) - def visit_enum(self, type_): + def visit_enum(self, type_, **kw): if not type_.native_enum: return super(MySQLTypeCompiler, self).visit_enum(type_) else: return self._visit_enumerated_values("ENUM", type_, type_.enums) - def visit_BLOB(self, type_): + def visit_BLOB(self, type_, **kw): if type_.length: return "BLOB(%d)" % type_.length else: return "BLOB" - def visit_TINYBLOB(self, type_): + def visit_TINYBLOB(self, type_, **kw): return "TINYBLOB" - def visit_MEDIUMBLOB(self, type_): + def visit_MEDIUMBLOB(self, type_, **kw): return "MEDIUMBLOB" - def visit_LONGBLOB(self, type_): + def visit_LONGBLOB(self, type_, **kw): return "LONGBLOB" def _visit_enumerated_values(self, name, type_, enumerated_values): @@ -2046,18 +2389,18 @@ def _visit_enumerated_values(self, name, type_, enumerated_values): for e in enumerated_values: quoted_enums.append("'%s'" % e.replace("'", "''")) return self._extend_string(type_, {}, "%s(%s)" % ( - name, ",".join(quoted_enums)) - ) + name, ",".join(quoted_enums)) + ) - def visit_ENUM(self, type_): + def visit_ENUM(self, type_, **kw): return self._visit_enumerated_values("ENUM", type_, - type_._enumerated_values) + type_._enumerated_values) - def visit_SET(self, type_): + def visit_SET(self, type_, **kw): return self._visit_enumerated_values("SET", type_, - type_._enumerated_values) + type_._enumerated_values) - def visit_BOOLEAN(self, type): + def visit_BOOLEAN(self, type, **kw): return "BOOL" @@ -2072,9 +2415,9 @@ def __init__(self, dialect, server_ansiquotes=False, **kw): quote = '"' super(MySQLIdentifierPreparer, self).__init__( - dialect, - initial_quote=quote, - escape_quote=quote) + dialect, + initial_quote=quote, + escape_quote=quote) def _quote_free_identifiers(self, *ids): """Unilaterally identifier-quote any number of strings.""" @@ 
-2084,11 +2427,17 @@ def _quote_free_identifiers(self, *ids): @log.class_logger class MySQLDialect(default.DefaultDialect): - """Details of the MySQL dialect. Not used directly in application code.""" + """Details of the MySQL dialect. + Not used directly in application code. + """ name = 'mysql' supports_alter = True + # MySQL has no true "boolean" type; we + # allow for the "true" and "false" keywords, however + supports_native_boolean = False + # identifiers are 64, however aliases can be 255... max_identifier_length = 255 max_index_name_length = 64 @@ -2143,8 +2492,8 @@ def connect(conn): else: return None - _isolation_lookup = set(['SERIALIZABLE', - 'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ']) + _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED', + 'READ COMMITTED', 'REPEATABLE READ']) def set_isolation_level(self, connection, level): level = level.replace('_', ' ') @@ -2153,7 +2502,7 @@ def set_isolation_level(self, connection, level): "Invalid value '%s' for isolation_level. " "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) - ) + ) cursor = connection.cursor() cursor.execute("SET SESSION TRANSACTION ISOLATION LEVEL %s" % level) cursor.execute("COMMIT") @@ -2172,13 +2521,14 @@ def do_commit(self, dbapi_connection): """Execute a COMMIT.""" # COMMIT/ROLLBACK were introduced in 3.23.15. - # Yes, we have at least one user who has to talk to these old versions! + # Yes, we have at least one user who has to talk to these old + # versions! # - # Ignore commit/rollback if support isn't present, otherwise even basic - # operations via autocommit fail. + # Ignore commit/rollback if support isn't present, otherwise even + # basic operations via autocommit fail. try: dbapi_connection.commit() - except: + except Exception: if self.server_version_info < (3, 23, 15): args = sys.exc_info()[1].args if args and args[0] == 1064: @@ -2190,7 +2540,7 @@ def do_rollback(self, dbapi_connection): try: dbapi_connection.rollback() - except: + except Exception: if self.server_version_info < (3, 23, 15): args = sys.exc_info()[1].args if args and args[0] == 1064: @@ -2221,9 +2571,10 @@ def do_recover_twophase(self, connection): return [row['data'][0:row['gtrid_length']] for row in resultset] def is_disconnect(self, e, connection, cursor): - if isinstance(e, self.dbapi.OperationalError): + if isinstance(e, (self.dbapi.OperationalError, + self.dbapi.ProgrammingError)): return self._extract_error_code(e) in \ - (2006, 2013, 2014, 2045, 2055) + (2006, 2013, 2014, 2045, 2055) elif isinstance(e, self.dbapi.InterfaceError): # if underlying connection is closed, # this is the error you get @@ -2273,7 +2624,8 @@ def has_table(self, connection, table_name, schema=None): rs = None try: try: - rs = connection.execute(st) + rs = connection.execution_options( + skip_user_error_events=True).execute(st) have = rs.fetchone() is not None rs.close() return have @@ -2291,15 +2643,15 @@ def initialize(self, connection): if self._server_ansiquotes: # if ansiquotes == True, build a new IdentifierPreparer # with the new setting - self.identifier_preparer = self.preparer(self, - server_ansiquotes=self._server_ansiquotes) + self.identifier_preparer = self.preparer( + self, server_ansiquotes=self._server_ansiquotes) default.DefaultDialect.initialize(self, connection) @property def _supports_cast(self): return self.server_version_info is None or \ - self.server_version_info >= (4, 0, 2) + self.server_version_info >= (4, 0, 2) @reflection.cache def get_schema_names(self, 
connection, **kw): @@ -2316,17 +2668,19 @@ def get_table_names(self, connection, schema=None, **kw): charset = self._connection_charset if self.server_version_info < (5, 0, 2): - rp = connection.execute("SHOW TABLES FROM %s" % + rp = connection.execute( + "SHOW TABLES FROM %s" % self.identifier_preparer.quote_identifier(current_schema)) return [row[0] for - row in self._compat_fetchall(rp, charset=charset)] + row in self._compat_fetchall(rp, charset=charset)] else: - rp = connection.execute("SHOW FULL TABLES FROM %s" % - self.identifier_preparer.quote_identifier(current_schema)) + rp = connection.execute( + "SHOW FULL TABLES FROM %s" % + self.identifier_preparer.quote_identifier(current_schema)) return [row[0] - for row in self._compat_fetchall(rp, charset=charset) - if row[1] == 'BASE TABLE'] + for row in self._compat_fetchall(rp, charset=charset) + if row[1] == 'BASE TABLE'] @reflection.cache def get_view_names(self, connection, schema=None, **kw): @@ -2337,29 +2691,30 @@ def get_view_names(self, connection, schema=None, **kw): if self.server_version_info < (5, 0, 2): return self.get_table_names(connection, schema) charset = self._connection_charset - rp = connection.execute("SHOW FULL TABLES FROM %s" % - self.identifier_preparer.quote_identifier(schema)) + rp = connection.execute( + "SHOW FULL TABLES FROM %s" % + self.identifier_preparer.quote_identifier(schema)) return [row[0] - for row in self._compat_fetchall(rp, charset=charset) - if row[1] in ('VIEW', 'SYSTEM VIEW')] + for row in self._compat_fetchall(rp, charset=charset) + if row[1] in ('VIEW', 'SYSTEM VIEW')] @reflection.cache def get_table_options(self, connection, table_name, schema=None, **kw): parsed_state = self._parsed_state_or_create( - connection, table_name, schema, **kw) + connection, table_name, schema, **kw) return parsed_state.table_options @reflection.cache def get_columns(self, connection, table_name, schema=None, **kw): parsed_state = self._parsed_state_or_create( - connection, table_name, schema, **kw) + connection, table_name, schema, **kw) return parsed_state.columns @reflection.cache def get_pk_constraint(self, connection, table_name, schema=None, **kw): parsed_state = self._parsed_state_or_create( - connection, table_name, schema, **kw) + connection, table_name, schema, **kw) for key in parsed_state.keys: if key['type'] == 'PRIMARY': # There can be only one. 
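The reflection methods reformatted in this hunk (``get_table_names``, ``get_view_names``, ``get_columns`` and friends) are normally reached through the public inspector interface rather than called directly; a rough sketch, assuming a reachable MySQL database (URL and table name are placeholders)::

    from sqlalchemy import create_engine, inspect

    engine = create_engine('mysql+mysqldb://scott:tiger@localhost/test')
    inspector = inspect(engine)

    # Backed by SHOW FULL TABLES, filtered to 'BASE TABLE' rows
    print(inspector.get_table_names())

    # Parsed out of SHOW CREATE TABLE by the dialect's table-definition parser
    print(inspector.get_columns('some_table'))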
@@ -2371,7 +2726,7 @@ def get_pk_constraint(self, connection, table_name, schema=None, **kw): def get_foreign_keys(self, connection, table_name, schema=None, **kw): parsed_state = self._parsed_state_or_create( - connection, table_name, schema, **kw) + connection, table_name, schema, **kw) default_schema = None fkeys = [] @@ -2379,7 +2734,8 @@ def get_foreign_keys(self, connection, table_name, schema=None, **kw): for spec in parsed_state.constraints: # only FOREIGN KEYs ref_name = spec['table'][-1] - ref_schema = len(spec['table']) > 1 and spec['table'][-2] or schema + ref_schema = len(spec['table']) > 1 and \ + spec['table'][-2] or schema if not ref_schema: if default_schema is None: @@ -2411,7 +2767,7 @@ def get_foreign_keys(self, connection, table_name, schema=None, **kw): def get_indexes(self, connection, table_name, schema=None, **kw): parsed_state = self._parsed_state_or_create( - connection, table_name, schema, **kw) + connection, table_name, schema, **kw) indexes = [] for spec in parsed_state.keys: @@ -2425,13 +2781,14 @@ def get_indexes(self, connection, table_name, schema=None, **kw): pass else: self.logger.info( - "Converting unknown KEY type %s to a plain KEY" % flavor) + "Converting unknown KEY type %s to a plain KEY", flavor) pass index_d = {} index_d['name'] = spec['name'] index_d['column_names'] = [s[0] for s in spec['columns']] index_d['unique'] = unique - index_d['type'] = flavor + if flavor: + index_d['type'] = flavor indexes.append(index_d) return indexes @@ -2444,7 +2801,8 @@ def get_unique_constraints(self, connection, table_name, return [ { 'name': key['name'], - 'column_names': [col[0] for col in key['columns']] + 'column_names': [col[0] for col in key['columns']], + 'duplicates_index': key['name'], } for key in parsed_state.keys if key['type'] == 'UNIQUE' @@ -2461,13 +2819,13 @@ def get_view_definition(self, connection, view_name, schema=None, **kw): return sql def _parsed_state_or_create(self, connection, table_name, - schema=None, **kw): + schema=None, **kw): return self._setup_parser( - connection, - table_name, - schema, - info_cache=kw.get('info_cache', None) - ) + connection, + table_name, + schema, + info_cache=kw.get('info_cache', None) + ) @util.memoized_property def _tabledef_parser(self): @@ -2492,7 +2850,7 @@ def _setup_parser(self, connection, table_name, schema=None, **kw): schema, table_name)) sql = self._show_create_table(connection, None, charset, full_name=full_name) - if sql.startswith('CREATE ALGORITHM'): + if re.match(r'^CREATE (?:ALGORITHM)?.* VIEW', sql): # Adapt views to something table-like. 
columns = self._describe_table(connection, None, charset, full_name=full_name) @@ -2514,7 +2872,7 @@ def _detect_casing(self, connection): charset = self._connection_charset row = self._compat_first(connection.execute( "SHOW VARIABLES LIKE 'lower_case_table_names'"), - charset=charset) + charset=charset) if not row: cs = 0 else: @@ -2549,7 +2907,7 @@ def _detect_ansiquotes(self, connection): row = self._compat_first( connection.execute("SHOW VARIABLES LIKE 'sql_mode'"), - charset=self._connection_charset) + charset=self._connection_charset) if not row: mode = '' @@ -2565,7 +2923,6 @@ def _detect_ansiquotes(self, connection): # as of MySQL 5.0.1 self._backslash_escapes = 'NO_BACKSLASH_ESCAPES' not in mode - def _show_create_table(self, connection, table, charset=None, full_name=None): """Run SHOW CREATE TABLE for a ``Table``.""" @@ -2576,7 +2933,8 @@ def _show_create_table(self, connection, table, charset=None, rp = None try: - rp = connection.execute(st) + rp = connection.execution_options( + skip_user_error_events=True).execute(st) except exc.DBAPIError as e: if self._extract_error_code(e.orig) == 1146: raise exc.NoSuchTableError(full_name) @@ -2590,7 +2948,7 @@ def _show_create_table(self, connection, table, charset=None, return sql def _describe_table(self, connection, table, charset=None, - full_name=None): + full_name=None): """Run DESCRIBE for a ``Table`` and return processed rows.""" if full_name is None: @@ -2600,7 +2958,8 @@ def _describe_table(self, connection, table, charset=None, rp, rows = None, None try: try: - rp = connection.execute(st) + rp = connection.execution_options( + skip_user_error_events=True).execute(st) except exc.DBAPIError as e: if self._extract_error_code(e.orig) == 1146: raise exc.NoSuchTableError(full_name) @@ -2682,7 +3041,7 @@ def _parse_constraints(self, line): if m: spec = m.groupdict() spec['table'] = \ - self.preparer.unformat_identifiers(spec['table']) + self.preparer.unformat_identifiers(spec['table']) spec['local'] = [c[0] for c in self._parse_keyexprs(spec['local'])] spec['foreign'] = [c[0] @@ -2762,8 +3121,7 @@ def _parse_column(self, line, state): if not spec['full']: util.warn("Incomplete reflection of column definition %r" % line) - name, type_, args, notnull = \ - spec['name'], spec['coltype'], spec['arg'], spec['notnull'] + name, type_, args = spec['name'], spec['coltype'], spec['arg'] try: col_type = self.dialect.ischema_names[type_] @@ -2782,23 +3140,31 @@ def _parse_column(self, line, state): # Column type keyword options type_kw = {} + + if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)): + if type_args: + type_kw['fsp'] = type_args.pop(0) + for kw in ('unsigned', 'zerofill'): if spec.get(kw, False): type_kw[kw] = True for kw in ('charset', 'collate'): if spec.get(kw, False): type_kw[kw] = spec[kw] - if issubclass(col_type, _EnumeratedValues): type_args = _EnumeratedValues._strip_values(type_args) + if issubclass(col_type, SET) and '' in type_args: + type_kw['retrieve_as_bitwise'] = True + type_instance = col_type(*type_args, **type_kw) - col_args, col_kw = [], {} + col_kw = {} # NOT NULL col_kw['nullable'] = True - if spec.get('notnull', False): + # this can be "NULL" in the case of TIMESTAMP + if spec.get('notnull', False) == 'NOT NULL': col_kw['nullable'] = False # AUTO_INCREMENT @@ -2833,7 +3199,7 @@ def _describe_to_create(self, table_name, columns): buffer = [] for row in columns: (name, col_type, nullable, default, extra) = \ - [row[i] for i in (0, 1, 2, 4, 5)] + [row[i] for i in (0, 1, 2, 4, 5)] line = [' '] 
line.append(self.preparer.quote_identifier(name)) @@ -2912,15 +3278,15 @@ def _prep_regexes(self): r'%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +' r'(?P\w+)' r'(?:\((?P(?:\d+|\d+,\d+|' - r'(?:\x27(?:\x27\x27|[^\x27])*\x27,?)+))\))?' + r'(?:\x27(?:\x27\x27|[^\x27])*\x27,?)+))\))?' r'(?: +(?PUNSIGNED))?' r'(?: +(?PZEROFILL))?' r'(?: +CHARACTER SET +(?P[\w_]+))?' r'(?: +COLLATE +(?P[\w_]+))?' - r'(?: +(?PNOT NULL))?' + r'(?: +(?P(?:NOT )?NULL))?' r'(?: +DEFAULT +(?P' - r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+' - r'(?: +ON UPDATE \w+)?)' + r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+' + r'(?: +ON UPDATE \w+)?)' r'))?' r'(?: +(?PAUTO_INCREMENT))?' r'(?: +COMMENT +(P(?:\x27\x27|[^\x27])+))?' @@ -2929,7 +3295,7 @@ def _prep_regexes(self): r'(?: +(?P.*))?' r',?$' % quotes - ) + ) # Fallback, try to parse as little as possible self._re_column_loose = _re_compile( @@ -2937,9 +3303,9 @@ def _prep_regexes(self): r'%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +' r'(?P\w+)' r'(?:\((?P(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?' - r'.*?(?PNOT NULL)?' + r'.*?(?P(?:NOT )NULL)?' % quotes - ) + ) # (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))? # (`col` (ASC|DESC)?, `col` (ASC|DESC)?) @@ -2951,11 +3317,11 @@ def _prep_regexes(self): r'(?: +USING +(?P\S+))?' r' +\((?P.+?)\)' r'(?: +USING +(?P\S+))?' - r'(?: +KEY_BLOCK_SIZE +(?P\S+))?' + r'(?: +KEY_BLOCK_SIZE *[ =]? *(?P\S+))?' r'(?: +WITH PARSER +(?P\S+))?' r',?$' % quotes - ) + ) # CONSTRAINT `name` FOREIGN KEY (`local_col`) # REFERENCES `remote` (`remote_col`) @@ -2971,13 +3337,14 @@ def _prep_regexes(self): r'%(iq)s(?P(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +' r'FOREIGN KEY +' r'\((?P[^\)]+?)\) REFERENCES +' - r'(?P
<table>%(iq)s[^%(fq)s]+%(fq)s(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +' + r'(?P<table>
%(iq)s[^%(fq)s]+%(fq)s' + r'(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +' r'\((?P[^\)]+?)\)' r'(?: +(?PMATCH \w+))?' r'(?: +ON DELETE (?P%(on)s))?' r'(?: +ON UPDATE (?P%(on)s))?' % kw - ) + ) # PARTITION # @@ -3000,8 +3367,9 @@ def _prep_regexes(self): self._add_option_regex('UNION', r'\([^\)]+\)') self._add_option_regex('TABLESPACE', r'.*? STORAGE DISK') - self._add_option_regex('RAID_TYPE', - r'\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+') + self._add_option_regex( + 'RAID_TYPE', + r'\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+') _optional_equals = r'(?:\s*(?:=\s*)|\s+)' @@ -3009,8 +3377,9 @@ def _add_option_string(self, directive): regex = (r'(?P%s)%s' r"'(?P(?:[^']|'')*?)'(?!')" % (re.escape(directive), self._optional_equals)) - self._pr_options.append(_pr_compile(regex, lambda v: - v.replace("\\\\", "\\").replace("''", "'"))) + self._pr_options.append(_pr_compile( + regex, lambda v: v.replace("\\\\", "\\").replace("''", "'") + )) def _add_option_word(self, directive): regex = (r'(?P%s)%s' @@ -3028,7 +3397,6 @@ def _add_option_regex(self, directive, regex): 'PASSWORD', 'CONNECTION') - class _DecodingRowProxy(object): """Return unicode-decoded values based on type inspection. @@ -3042,9 +3410,17 @@ class _DecodingRowProxy(object): # sets.Set(['value']) (seriously) but thankfully that doesn't # seem to come up in DDL queries. + _encoding_compat = { + 'koi8r': 'koi8_r', + 'koi8u': 'koi8_u', + 'utf16': 'utf-16-be', # MySQL's uft16 is always bigendian + 'utf8mb4': 'utf8', # real utf8 + 'eucjpms': 'ujis', + } + def __init__(self, rowproxy, charset): self.rowproxy = rowproxy - self.charset = charset + self.charset = self._encoding_compat.get(charset, charset) def __getitem__(self, index): item = self.rowproxy[index] diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py index 4972804595..8bc0ae3be7 100644 --- a/lib/sqlalchemy/dialects/mysql/cymysql.py +++ b/lib/sqlalchemy/dialects/mysql/cymysql.py @@ -1,5 +1,6 @@ # mysql/cymysql.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,7 +10,8 @@ .. dialect:: mysql+cymysql :name: CyMySQL :dbapi: cymysql - :connectstring: mysql+cymysql://:@/[?] + :connectstring: mysql+cymysql://:@/\ +[?] :url: https://github.com/nakagami/CyMySQL """ @@ -19,6 +21,7 @@ from .base import (BIT, MySQLDialect) from ... import util + class _cymysqlBIT(BIT): def result_processor(self, dialect, coltype): """Convert a MySQL's 64 bit, variable length binary string to a long. 
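The option regexes above parse the trailing clauses of ``SHOW CREATE TABLE`` output back into the ``mysql_*`` keyword arguments accepted at table-definition time; a small sketch of the forward direction, with illustrative names and options::

    from sqlalchemy import Column, Integer, MetaData, Table

    metadata = MetaData()

    # ENGINE and CHARSET are rendered in the CREATE TABLE suffix and come
    # back through get_table_options() when the table is reflected.
    t = Table(
        'data', metadata,
        Column('id', Integer, primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )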
@@ -73,7 +76,7 @@ def _extract_error_code(self, exception): def is_disconnect(self, e, connection, cursor): if isinstance(e, self.dbapi.OperationalError): return self._extract_error_code(e) in \ - (2006, 2013, 2014, 2045, 2055) + (2006, 2013, 2014, 2045, 2055) elif isinstance(e, self.dbapi.InterfaceError): # if underlying connection is closed, # this is the error you get diff --git a/lib/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/sqlalchemy/dialects/mysql/gaerdbms.py index 13203fce3c..4e365884e6 100644 --- a/lib/sqlalchemy/dialects/mysql/gaerdbms.py +++ b/lib/sqlalchemy/dialects/mysql/gaerdbms.py @@ -1,5 +1,6 @@ # mysql/gaerdbms.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,13 +9,21 @@ :name: Google Cloud SQL :dbapi: rdbms :connectstring: mysql+gaerdbms:///?instance= - :url: https://developers.google.com/appengine/docs/python/cloud-sql/developers-guide + :url: https://developers.google.com/appengine/docs/python/cloud-sql/\ +developers-guide - This dialect is based primarily on the :mod:`.mysql.mysqldb` dialect with minimal - changes. + This dialect is based primarily on the :mod:`.mysql.mysqldb` dialect with + minimal changes. .. versionadded:: 0.7.8 + .. deprecated:: 1.0 This dialect is **no longer necessary** for + Google Cloud SQL; the MySQLdb dialect can be used directly. + Cloud SQL now recommends creating connections via the + mysql dialect using the URL format + + ``mysql+mysqldb://root@/?unix_socket=/cloudsql/:`` + Pooling ------- @@ -31,6 +40,7 @@ from .mysqldb import MySQLDialect_mysqldb from ...pool import NullPool import re +from sqlalchemy.util import warn_deprecated def _is_dev_environment(): @@ -41,10 +51,18 @@ class MySQLDialect_gaerdbms(MySQLDialect_mysqldb): @classmethod def dbapi(cls): + + warn_deprecated( + "Google Cloud SQL now recommends creating connections via the " + "MySQLdb dialect directly, using the URL format " + "mysql+mysqldb://root@/?unix_socket=/cloudsql/" + ":" + ) + # from django: # http://code.google.com/p/googleappengine/source/ # browse/trunk/python/google/storage/speckle/ - # python/django/backend/base.py#118 + # python/django/backend/base.py#118 # see also [ticket:2649] # see also http://stackoverflow.com/q/14224679/34549 from google.appengine.api import apiproxy_stub_map diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index 3536c3ad81..a3a3f2be9f 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -1,5 +1,6 @@ # mysql/mysqlconnector.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,17 +9,25 @@ .. dialect:: mysql+mysqlconnector :name: MySQL Connector/Python :dbapi: myconnpy - :connectstring: mysql+mysqlconnector://:@[:]/ + :connectstring: mysql+mysqlconnector://:@\ +[:]/ :url: http://dev.mysql.com/downloads/connector/python/ +Unicode +------- + +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. 
+ """ -from .base import (MySQLDialect, - MySQLExecutionContext, MySQLCompiler, MySQLIdentifierPreparer, - BIT) +from .base import (MySQLDialect, MySQLExecutionContext, + MySQLCompiler, MySQLIdentifierPreparer, + BIT) from ... import util +import re class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext): @@ -29,18 +38,34 @@ def get_lastrowid(self): class MySQLCompiler_mysqlconnector(MySQLCompiler): def visit_mod_binary(self, binary, operator, **kw): - return self.process(binary.left, **kw) + " %% " + \ - self.process(binary.right, **kw) + if self.dialect._mysqlconnector_double_percents: + return self.process(binary.left, **kw) + " %% " + \ + self.process(binary.right, **kw) + else: + return self.process(binary.left, **kw) + " % " + \ + self.process(binary.right, **kw) def post_process_text(self, text): - return text.replace('%', '%%') + if self.dialect._mysqlconnector_double_percents: + return text.replace('%', '%%') + else: + return text + + def escape_literal_column(self, text): + if self.dialect._mysqlconnector_double_percents: + return text.replace('%', '%%') + else: + return text class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer): def _escape_identifier(self, value): value = value.replace(self.escape_quote, self.escape_to_quote) - return value.replace("%", "%%") + if self.dialect._mysqlconnector_double_percents: + return value.replace("%", "%%") + else: + return value class _myconnpyBIT(BIT): @@ -53,8 +78,6 @@ def result_processor(self, dialect, coltype): class MySQLDialect_mysqlconnector(MySQLDialect): driver = 'mysqlconnector' - if util.py2k: - supports_unicode_statements = False supports_unicode_binds = True supports_sane_rowcount = True @@ -75,6 +98,10 @@ class MySQLDialect_mysqlconnector(MySQLDialect): } ) + @util.memoized_property + def supports_unicode_statements(self): + return util.py3k or self._mysqlconnector_version_info > (2, 0) + @classmethod def dbapi(cls): from mysql import connector @@ -87,21 +114,39 @@ def create_connect_args(self, url): util.coerce_kw_type(opts, 'buffered', bool) util.coerce_kw_type(opts, 'raise_on_warnings', bool) + + # unfortunately, MySQL/connector python refuses to release a + # cursor without reading fully, so non-buffered isn't an option opts.setdefault('buffered', True) - opts.setdefault('raise_on_warnings', True) # FOUND_ROWS must be set in ClientFlag to enable # supports_sane_rowcount. 
if self.dbapi is not None: try: from mysql.connector.constants import ClientFlag - client_flags = opts.get('client_flags', ClientFlag.get_default()) + client_flags = opts.get( + 'client_flags', ClientFlag.get_default()) client_flags |= ClientFlag.FOUND_ROWS opts['client_flags'] = client_flags - except: + except Exception: pass return [[], opts] + @util.memoized_property + def _mysqlconnector_version_info(self): + if self.dbapi and hasattr(self.dbapi, '__version__'): + m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', + self.dbapi.__version__) + if m: + return tuple( + int(x) + for x in m.group(1, 2, 3) + if x is not None) + + @util.memoized_property + def _mysqlconnector_double_percents(self): + return not util.py3k and self._mysqlconnector_version_info < (2, 0) + def _get_server_version_info(self, connection): dbapi_con = connection.connection version = dbapi_con.get_server_version() diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py index 7fb63f13bc..9c35eb77b4 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py @@ -1,5 +1,6 @@ # mysql/mysqldb.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,83 +13,186 @@ :connectstring: mysql+mysqldb://:@[:]/ :url: http://sourceforge.net/projects/mysql-python +.. _mysqldb_unicode: Unicode ------- -MySQLdb requires a "charset" parameter to be passed in order for it -to handle non-ASCII characters correctly. When this parameter is passed, -MySQLdb will also implicitly set the "use_unicode" flag to true, which means -that it will return Python unicode objects instead of bytestrings. -However, SQLAlchemy's decode process, when C extensions are enabled, -is orders of magnitude faster than that of MySQLdb as it does not call into -Python functions to do so. Therefore, the **recommended URL to use for -unicode** will include both charset and use_unicode=0:: +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. - create_engine("mysql+mysqldb://user:pass@host/dbname?charset=utf8&use_unicode=0") +Py3K Support +------------ -As of this writing, MySQLdb only runs on Python 2. It is not known how -MySQLdb behaves on Python 3 as far as unicode decoding. +Currently, MySQLdb only runs on Python 2 and development has been stopped. +`mysqlclient`_ is fork of MySQLdb and provides Python 3 support as well +as some bugfixes. +.. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python -Known Issues -------------- +Using MySQLdb with Google Cloud SQL +----------------------------------- -MySQL-python version 1.2.2 has a serious memory leak related -to unicode conversion, a feature which is disabled via ``use_unicode=0``. -It is strongly advised to use the latest version of MySQL-Python. +Google Cloud SQL now recommends use of the MySQLdb dialect. Connect +using a URL like the following:: + + mysql+mysqldb://root@/?unix_socket=/cloudsql/: """ from .base import (MySQLDialect, MySQLExecutionContext, - MySQLCompiler, MySQLIdentifierPreparer) -from ...connectors.mysqldb import ( - MySQLDBExecutionContext, - MySQLDBCompiler, - MySQLDBIdentifierPreparer, - MySQLDBConnector - ) + MySQLCompiler, MySQLIdentifierPreparer) from .base import TEXT from ... import sql +from ... 
import util +import re + + +class MySQLExecutionContext_mysqldb(MySQLExecutionContext): + + @property + def rowcount(self): + if hasattr(self, '_rowcount'): + return self._rowcount + else: + return self.cursor.rowcount + -class MySQLExecutionContext_mysqldb(MySQLDBExecutionContext, MySQLExecutionContext): - pass +class MySQLCompiler_mysqldb(MySQLCompiler): + def visit_mod_binary(self, binary, operator, **kw): + return self.process(binary.left, **kw) + " %% " + \ + self.process(binary.right, **kw) + def post_process_text(self, text): + return text.replace('%', '%%') -class MySQLCompiler_mysqldb(MySQLDBCompiler, MySQLCompiler): - pass +class MySQLIdentifierPreparer_mysqldb(MySQLIdentifierPreparer): -class MySQLIdentifierPreparer_mysqldb(MySQLDBIdentifierPreparer, MySQLIdentifierPreparer): - pass + def _escape_identifier(self, value): + value = value.replace(self.escape_quote, self.escape_to_quote) + return value.replace("%", "%%") -class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect): +class MySQLDialect_mysqldb(MySQLDialect): + driver = 'mysqldb' + supports_unicode_statements = True + supports_sane_rowcount = True + supports_sane_multi_rowcount = True + + supports_native_decimal = True + + default_paramstyle = 'format' execution_ctx_cls = MySQLExecutionContext_mysqldb statement_compiler = MySQLCompiler_mysqldb preparer = MySQLIdentifierPreparer_mysqldb + @classmethod + def dbapi(cls): + return __import__('MySQLdb') + + def do_executemany(self, cursor, statement, parameters, context=None): + rowcount = cursor.executemany(statement, parameters) + if context is not None: + context._rowcount = rowcount + def _check_unicode_returns(self, connection): # work around issue fixed in # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8 # specific issue w/ the utf8_bin collation and unicode returns - has_utf8_bin = connection.scalar( - "show collation where %s = 'utf8' and %s = 'utf8_bin'" - % ( - self.identifier_preparer.quote("Charset"), - self.identifier_preparer.quote("Collation") - )) + has_utf8_bin = self.server_version_info > (5, ) and \ + connection.scalar( + "show collation where %s = 'utf8' and %s = 'utf8_bin'" + % ( + self.identifier_preparer.quote("Charset"), + self.identifier_preparer.quote("Collation") + )) if has_utf8_bin: additional_tests = [ sql.collate(sql.cast( - sql.literal_column( + sql.literal_column( "'test collated returns'"), - TEXT(charset='utf8')), "utf8_bin") + TEXT(charset='utf8')), "utf8_bin") ] else: additional_tests = [] - return super(MySQLDBConnector, self)._check_unicode_returns( - connection, additional_tests) + return super(MySQLDialect_mysqldb, self)._check_unicode_returns( + connection, additional_tests) + + def create_connect_args(self, url): + opts = url.translate_connect_args(database='db', username='user', + password='passwd') + opts.update(url.query) + + util.coerce_kw_type(opts, 'compress', bool) + util.coerce_kw_type(opts, 'connect_timeout', int) + util.coerce_kw_type(opts, 'read_timeout', int) + util.coerce_kw_type(opts, 'client_flag', int) + util.coerce_kw_type(opts, 'local_infile', int) + # Note: using either of the below will cause all strings to be + # returned as Unicode, both in raw SQL operations and with column + # types like String and MSString. + util.coerce_kw_type(opts, 'use_unicode', bool) + util.coerce_kw_type(opts, 'charset', str) + + # Rich values 'cursorclass' and 'conv' are not supported via + # query string. 
+ + ssl = {} + keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher'] + for key in keys: + if key in opts: + ssl[key[4:]] = opts[key] + util.coerce_kw_type(ssl, key[4:], str) + del opts[key] + if ssl: + opts['ssl'] = ssl + + # FOUND_ROWS must be set in CLIENT_FLAGS to enable + # supports_sane_rowcount. + client_flag = opts.get('client_flag', 0) + if self.dbapi is not None: + try: + CLIENT_FLAGS = __import__( + self.dbapi.__name__ + '.constants.CLIENT' + ).constants.CLIENT + client_flag |= CLIENT_FLAGS.FOUND_ROWS + except (AttributeError, ImportError): + self.supports_sane_rowcount = False + opts['client_flag'] = client_flag + return [[], opts] + + def _get_server_version_info(self, connection): + dbapi_con = connection.connection + version = [] + r = re.compile('[.\-]') + for n in r.split(dbapi_con.get_server_info()): + try: + version.append(int(n)) + except ValueError: + version.append(n) + return tuple(version) + + def _extract_error_code(self, exception): + return exception.args[0] + + def _detect_charset(self, connection): + """Sniff out the character set in use for connection results.""" + + try: + # note: the SQL here would be + # "SHOW VARIABLES LIKE 'character_set%%'" + cset_name = connection.connection.character_set_name + except AttributeError: + util.warn( + "No 'character_set_name' can be detected with " + "this MySQL-Python version; " + "please upgrade to a recent version of MySQL-Python. " + "Assuming latin1.") + return 'latin1' + else: + return cset_name() + dialect = MySQLDialect_mysqldb diff --git a/lib/sqlalchemy/dialects/mysql/oursql.py b/lib/sqlalchemy/dialects/mysql/oursql.py index e6b50f335b..b91db18369 100644 --- a/lib/sqlalchemy/dialects/mysql/oursql.py +++ b/lib/sqlalchemy/dialects/mysql/oursql.py @@ -1,5 +1,6 @@ # mysql/oursql.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -15,22 +16,10 @@ Unicode ------- -oursql defaults to using ``utf8`` as the connection charset, but other -encodings may be used instead. Like the MySQL-Python driver, unicode support -can be completely disabled:: +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. 
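For the MySQLdb ``create_connect_args`` changes shown a little earlier, the ``ssl_*`` values are taken straight off the URL query string; a hedged example of such a URL (host, database and certificate paths are placeholders)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "mysql+mysqldb://scott:tiger@db.example.com/test"
        "?ssl_ca=/path/to/ca.pem"
        "&ssl_cert=/path/to/client-cert.pem"
        "&ssl_key=/path/to/client-key.pem"
    )

The ``ssl_*`` entries are folded into a single ``ssl`` dictionary before being handed to MySQLdb.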
- # oursql sets the connection charset to utf8 automatically; all strings come - # back as utf8 str - create_engine('mysql+oursql:///mydb?use_unicode=0') -To not automatically use ``utf8`` and instead use whatever the connection -defaults to, there is a separate parameter:: - - # use the default connection charset; all strings come back as unicode - create_engine('mysql+oursql:///mydb?default_charset=1') - - # use latin1 as the connection charset; all strings come back as unicode - create_engine('mysql+oursql:///mydb?charset=latin1') """ import re @@ -79,7 +68,8 @@ def dbapi(cls): return __import__('oursql') def do_execute(self, cursor, statement, parameters, context=None): - """Provide an implementation of *cursor.execute(statement, parameters)*.""" + """Provide an implementation of + *cursor.execute(statement, parameters)*.""" if context and context.plain_query: cursor.execute(statement, plain_query=True) @@ -94,9 +84,11 @@ def _xa_query(self, connection, query, xid): arg = connection.connection._escape_string(xid) else: charset = self._connection_charset - arg = connection.connection._escape_string(xid.encode(charset)).decode(charset) + arg = connection.connection._escape_string( + xid.encode(charset)).decode(charset) arg = "'%s'" % arg - connection.execution_options(_oursql_plain_query=True).execute(query % arg) + connection.execution_options( + _oursql_plain_query=True).execute(query % arg) # Because mysql is bad, these methods have to be # reimplemented to use _PlainQuery. Basically, some queries @@ -126,10 +118,10 @@ def do_commit_twophase(self, connection, xid, is_prepared=True, # am i on a newer/older version of OurSQL ? def has_table(self, connection, table_name, schema=None): return MySQLDialect.has_table( - self, - connection.connect().execution_options(_oursql_plain_query=True), - table_name, - schema + self, + connection.connect().execution_options(_oursql_plain_query=True), + table_name, + schema ) def get_table_options(self, connection, table_name, schema=None, **kw): @@ -189,7 +181,8 @@ def _show_create_table(self, connection, table, charset=None, def is_disconnect(self, e, connection, cursor): if isinstance(e, self.dbapi.ProgrammingError): - return e.errno is None and 'cursor' not in e.args[1] and e.args[1].endswith('closed') + return e.errno is None and 'cursor' not in e.args[1] \ + and e.args[1].endswith('closed') else: return e.errno in (2006, 2013, 2014, 2045, 2055) @@ -217,7 +210,7 @@ def create_connect_args(self, url): ssl = {} for key in ['ssl_ca', 'ssl_key', 'ssl_cert', - 'ssl_capath', 'ssl_cipher']: + 'ssl_capath', 'ssl_cipher']: if key in opts: ssl[key[4:]] = opts[key] util.coerce_kw_type(ssl, key[4:], str) diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py index 7989203cfe..3c493fbfc2 100644 --- a/lib/sqlalchemy/dialects/mysql/pymysql.py +++ b/lib/sqlalchemy/dialects/mysql/pymysql.py @@ -1,5 +1,6 @@ # mysql/pymysql.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,28 +10,39 @@ .. dialect:: mysql+pymysql :name: PyMySQL :dbapi: pymysql - :connectstring: mysql+pymysql://:@/[?] - :url: http://code.google.com/p/pymysql/ + :connectstring: mysql+pymysql://:@/\ +[?] + :url: http://www.pymysql.org/ + +Unicode +------- + +Please see :ref:`mysql_unicode` for current recommendations on unicode +handling. 
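A minimal connection sketch for this driver, using placeholder credentials and database name::

    from sqlalchemy import create_engine

    # Same URL layout as the MySQLdb dialect; only the driver name changes.
    engine = create_engine("mysql+pymysql://scott:tiger@localhost/test")

    with engine.connect() as conn:
        print(conn.execute("SELECT VERSION()").fetchone())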
MySQL-Python Compatibility -------------------------- The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver, -and targets 100% compatibility. Most behavioral notes for MySQL-python apply to -the pymysql driver as well. +and targets 100% compatibility. Most behavioral notes for MySQL-python apply +to the pymysql driver as well. """ from .mysqldb import MySQLDialect_mysqldb from ...util import py3k + class MySQLDialect_pymysql(MySQLDialect_mysqldb): driver = 'pymysql' description_encoding = None - if py3k: - supports_unicode_statements = True + # generally, these two values should be both True + # or both False. PyMySQL unicode tests pass all the way back + # to 0.4 either way. See [ticket:3337] + supports_unicode_statements = True + supports_unicode_binds = True @classmethod def dbapi(cls): diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py index e60e39cea6..882d3ea4e8 100644 --- a/lib/sqlalchemy/dialects/mysql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py @@ -1,5 +1,6 @@ # mysql/pyodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -13,14 +14,11 @@ :connectstring: mysql+pyodbc://:@ :url: http://pypi.python.org/pypi/pyodbc/ - -Limitations ------------ - -The mysql-pyodbc dialect is subject to unresolved character encoding issues -which exist within the current ODBC drivers available. -(see http://code.google.com/p/pyodbc/issues/detail?id=25). Consider usage -of OurSQL, MySQLdb, or MySQL-connector/Python. + .. note:: The PyODBC for MySQL dialect is not well supported, and + is subject to unresolved character encoding issues + which exist within the current ODBC drivers available. + (see http://code.google.com/p/pyodbc/issues/detail?id=25). + Other dialects for MySQL are recommended. """ @@ -66,7 +64,8 @@ def _detect_charset(self, connection): if opts.get(key, None): return opts[key] - util.warn("Could not detect the connection character set. Assuming latin1.") + util.warn("Could not detect the connection character set. " + "Assuming latin1.") return 'latin1' def _extract_error_code(self, exception): diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py index b5fcfbdaf7..fe4c137051 100644 --- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py +++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py @@ -1,5 +1,6 @@ # mysql/zxjdbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,9 +10,13 @@ .. dialect:: mysql+zxjdbc :name: zxjdbc for Jython :dbapi: zxjdbc - :connectstring: mysql+zxjdbc://:@[:]/ + :connectstring: mysql+zxjdbc://:@[:]/\ + :driverurl: http://dev.mysql.com/downloads/connector/j/ + .. note:: Jython is not supported by current versions of SQLAlchemy. The + zxjdbc dialect should be considered as experimental. + Character Sets -------------- @@ -19,7 +24,7 @@ zxjdbc/JDBC layer. To allow multiple character sets to be sent from the MySQL Connector/J JDBC driver, by default SQLAlchemy sets its ``characterEncoding`` connection property to ``UTF-8``. It may be -overriden via a ``create_engine`` URL parameter. 
+overridden via a ``create_engine`` URL parameter. """ import re @@ -82,7 +87,8 @@ def _detect_charset(self, connection): if opts.get(key, None): return opts[key] - util.warn("Could not detect the connection character set. Assuming latin1.") + util.warn("Could not detect the connection character set. " + "Assuming latin1.") return 'latin1' def _driver_kwargs(self): diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py index b75762ab25..0c5c3174be 100644 --- a/lib/sqlalchemy/dialects/oracle/__init__.py +++ b/lib/sqlalchemy/dialects/oracle/__init__.py @@ -1,5 +1,6 @@ # oracle/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -16,8 +17,8 @@ __all__ = ( -'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'NUMBER', -'BLOB', 'BFILE', 'CLOB', 'NCLOB', 'TIMESTAMP', 'RAW', -'FLOAT', 'DOUBLE_PRECISION', 'LONG', 'dialect', 'INTERVAL', -'VARCHAR2', 'NVARCHAR2', 'ROWID' + 'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'NUMBER', + 'BLOB', 'BFILE', 'CLOB', 'NCLOB', 'TIMESTAMP', 'RAW', + 'FLOAT', 'DOUBLE_PRECISION', 'LONG', 'dialect', 'INTERVAL', + 'VARCHAR2', 'NVARCHAR2', 'ROWID' ) diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 8bacb885f8..eb639833e1 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -1,5 +1,6 @@ # oracle/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,31 +9,37 @@ .. dialect:: oracle :name: Oracle - Oracle version 8 through current (11g at the time of this writing) are supported. + Oracle version 8 through current (11g at the time of this writing) are + supported. Connect Arguments ----------------- -The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which -affect the behavior of the dialect regardless of driver in use. +The dialect supports several :func:`~sqlalchemy.create_engine()` arguments +which affect the behavior of the dialect regardless of driver in use. -* ``use_ansi`` - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults - to ``True``. If ``False``, Oracle-8 compatible constructs are used for joins. +* ``use_ansi`` - Use ANSI JOIN constructs (see the section on Oracle 8). + Defaults to ``True``. If ``False``, Oracle-8 compatible constructs are used + for joins. -* ``optimize_limits`` - defaults to ``False``. see the section on LIMIT/OFFSET. +* ``optimize_limits`` - defaults to ``False``. see the section on + LIMIT/OFFSET. -* ``use_binds_for_limits`` - defaults to ``True``. see the section on LIMIT/OFFSET. +* ``use_binds_for_limits`` - defaults to ``True``. see the section on + LIMIT/OFFSET. Auto Increment Behavior ----------------------- -SQLAlchemy Table objects which include integer primary keys are usually assumed to have -"autoincrementing" behavior, meaning they can generate their own primary key values upon -INSERT. Since Oracle has no "autoincrement" feature, SQLAlchemy relies upon sequences -to produce these values. With the Oracle dialect, *a sequence must always be explicitly -specified to enable autoincrement*. 
This is divergent with the majority of documentation -examples which assume the usage of an autoincrement-capable database. To specify sequences, -use the sqlalchemy.schema.Sequence object which is passed to a Column construct:: +SQLAlchemy Table objects which include integer primary keys are usually +assumed to have "autoincrementing" behavior, meaning they can generate their +own primary key values upon INSERT. Since Oracle has no "autoincrement" +feature, SQLAlchemy relies upon sequences to produce these values. With the +Oracle dialect, *a sequence must always be explicitly specified to enable +autoincrement*. This is divergent with the majority of documentation +examples which assume the usage of an autoincrement-capable database. To +specify sequences, use the sqlalchemy.schema.Sequence object which is passed +to a Column construct:: t = Table('mytable', metadata, Column('id', Integer, Sequence('id_seq'), primary_key=True), @@ -49,15 +56,16 @@ Identifier Casing ----------------- -In Oracle, the data dictionary represents all case insensitive identifier names -using UPPERCASE text. SQLAlchemy on the other hand considers an all-lower case identifier -name to be case insensitive. The Oracle dialect converts all case insensitive identifiers -to and from those two formats during schema level communication, such as reflection of -tables and indexes. Using an UPPERCASE name on the SQLAlchemy side indicates a -case sensitive identifier, and SQLAlchemy will quote the name - this will cause mismatches -against data dictionary data received from Oracle, so unless identifier names have been -truly created as case sensitive (i.e. using quoted names), all lowercase names should be -used on the SQLAlchemy side. +In Oracle, the data dictionary represents all case insensitive identifier +names using UPPERCASE text. SQLAlchemy on the other hand considers an +all-lower case identifier name to be case insensitive. The Oracle dialect +converts all case insensitive identifiers to and from those two formats during +schema level communication, such as reflection of tables and indexes. Using +an UPPERCASE name on the SQLAlchemy side indicates a case sensitive +identifier, and SQLAlchemy will quote the name - this will cause mismatches +against data dictionary data received from Oracle, so unless identifier names +have been truly created as case sensitive (i.e. using quoted names), all +lowercase names should be used on the SQLAlchemy side. LIMIT/OFFSET Support @@ -70,44 +78,49 @@ There are two options which affect its behavior: -* the "FIRST ROWS()" optimization keyword is not used by default. To enable the usage of this - optimization directive, specify ``optimize_limits=True`` to :func:`.create_engine`. -* the values passed for the limit/offset are sent as bound parameters. Some users have observed - that Oracle produces a poor query plan when the values are sent as binds and not - rendered literally. To render the limit/offset values literally within the SQL - statement, specify ``use_binds_for_limits=False`` to :func:`.create_engine`. - -Some users have reported better performance when the entirely different approach of a -window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to provide LIMIT/OFFSET (note -that the majority of users don't observe this). To suit this case the -method used for LIMIT/OFFSET can be replaced entirely. See the recipe at +* the "FIRST ROWS()" optimization keyword is not used by default. 
To enable + the usage of this optimization directive, specify ``optimize_limits=True`` + to :func:`.create_engine`. +* the values passed for the limit/offset are sent as bound parameters. Some + users have observed that Oracle produces a poor query plan when the values + are sent as binds and not rendered literally. To render the limit/offset + values literally within the SQL statement, specify + ``use_binds_for_limits=False`` to :func:`.create_engine`. + +Some users have reported better performance when the entirely different +approach of a window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to +provide LIMIT/OFFSET (note that the majority of users don't observe this). +To suit this case the method used for LIMIT/OFFSET can be replaced entirely. +See the recipe at http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault -which installs a select compiler that overrides the generation of limit/offset with -a window function. +which installs a select compiler that overrides the generation of limit/offset +with a window function. .. _oracle_returning: RETURNING Support ----------------- -The Oracle database supports a limited form of RETURNING, in order to retrieve result -sets of matched rows from INSERT, UPDATE and DELETE statements. Oracle's -RETURNING..INTO syntax only supports one row being returned, as it relies upon -OUT parameters in order to function. In addition, supported DBAPIs have further -limitations (see :ref:`cx_oracle_returning`). +The Oracle database supports a limited form of RETURNING, in order to retrieve +result sets of matched rows from INSERT, UPDATE and DELETE statements. +Oracle's RETURNING..INTO syntax only supports one row being returned, as it +relies upon OUT parameters in order to function. In addition, supported +DBAPIs have further limitations (see :ref:`cx_oracle_returning`). -SQLAlchemy's "implicit returning" feature, which employs RETURNING within an INSERT -and sometimes an UPDATE statement in order to fetch newly generated primary key values -and other SQL defaults and expressions, is normally enabled on the Oracle -backend. By default, "implicit returning" typically only fetches the value of a -single ``nextval(some_seq)`` expression embedded into an INSERT in order to increment -a sequence within an INSERT statement and get the value back at the same time. -To disable this feature across the board, specify ``implicit_returning=False`` to -:func:`.create_engine`:: +SQLAlchemy's "implicit returning" feature, which employs RETURNING within an +INSERT and sometimes an UPDATE statement in order to fetch newly generated +primary key values and other SQL defaults and expressions, is normally enabled +on the Oracle backend. By default, "implicit returning" typically only +fetches the value of a single ``nextval(some_seq)`` expression embedded into +an INSERT in order to increment a sequence within an INSERT statement and get +the value back at the same time. To disable this feature across the board, +specify ``implicit_returning=False`` to :func:`.create_engine`:: - engine = create_engine("oracle://scott:tiger@dsn", implicit_returning=False) + engine = create_engine("oracle://scott:tiger@dsn", + implicit_returning=False) -Implicit returning can also be disabled on a table-by-table basis as a table option:: +Implicit returning can also be disabled on a table-by-table basis as a table +option:: # Core Table my_table = Table("my_table", metadata, ..., implicit_returning=False) @@ -120,13 +133,15 @@ class MyClass(Base): .. 
seealso:: - :ref:`cx_oracle_returning` - additional cx_oracle-specific restrictions on implicit returning. + :ref:`cx_oracle_returning` - additional cx_oracle-specific restrictions on + implicit returning. ON UPDATE CASCADE ----------------- -Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based solution -is available at http://asktom.oracle.com/tkyte/update_cascade/index.html . +Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based +solution is available at +http://asktom.oracle.com/tkyte/update_cascade/index.html . When using the SQLAlchemy ORM, the ORM has limited ability to manually issue cascading updates - specify ForeignKey objects using the @@ -136,29 +151,32 @@ class MyClass(Base): Oracle 8 Compatibility ---------------------- -When Oracle 8 is detected, the dialect internally configures itself to the following -behaviors: +When Oracle 8 is detected, the dialect internally configures itself to the +following behaviors: * the use_ansi flag is set to False. This has the effect of converting all JOIN phrases into the WHERE clause, and in the case of LEFT OUTER JOIN makes use of Oracle's (+) operator. * the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when - the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are issued - instead. This because these types don't seem to work correctly on Oracle 8 - even though they are available. The :class:`~sqlalchemy.types.NVARCHAR` - and :class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate NVARCHAR2 and NCLOB. + the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are + issued instead. This because these types don't seem to work correctly on + Oracle 8 even though they are available. The + :class:`~sqlalchemy.types.NVARCHAR` and + :class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate + NVARCHAR2 and NCLOB. * the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy - encodes all Python unicode objects to "string" before passing in as bind parameters. + encodes all Python unicode objects to "string" before passing in as bind + parameters. Synonym/DBLINK Reflection ------------------------- -When using reflection with Table objects, the dialect can optionally search for tables -indicated by synonyms, either in local or remote schemas or accessed over DBLINK, -by passing the flag ``oracle_resolve_synonyms=True`` as a -keyword argument to the :class:`.Table` construct:: +When using reflection with Table objects, the dialect can optionally search +for tables indicated by synonyms, either in local or remote schemas or +accessed over DBLINK, by passing the flag ``oracle_resolve_synonyms=True`` as +a keyword argument to the :class:`.Table` construct:: some_table = Table('some_table', autoload=True, autoload_with=some_engine, @@ -166,9 +184,10 @@ class MyClass(Base): When this flag is set, the given name (such as ``some_table`` above) will be searched not just in the ``ALL_TABLES`` view, but also within the -``ALL_SYNONYMS`` view to see if this name is actually a synonym to another name. -If the synonym is located and refers to a DBLINK, the oracle dialect knows -how to locate the table's information using DBLINK syntax (e.g. ``@dblink``). +``ALL_SYNONYMS`` view to see if this name is actually a synonym to another +name. If the synonym is located and refers to a DBLINK, the oracle dialect +knows how to locate the table's information using DBLINK syntax(e.g. +``@dblink``). 
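A minimal sketch of the same flag used during inspection, reusing the placeholder
``oracle://scott:tiger@dsn`` URL from the examples above; ``remote_sales`` is a
hypothetical synonym visible to the connecting schema::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("oracle://scott:tiger@dsn")
    insp = inspect(engine)

    # the reflection option is forwarded to the Oracle dialect, so the synonym
    # (and any DBLINK behind it) is resolved before column metadata is read
    for col in insp.get_columns("remote_sales", oracle_resolve_synonyms=True):
        print(col["name"], col["type"])
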
``oracle_resolve_synonyms`` is accepted wherever reflection arguments are accepted, including methods such as :meth:`.MetaData.reflect` and @@ -194,31 +213,98 @@ class MyClass(Base): examining the type of column for use in special Python translations or for migrating schemas to other database backends. +.. _oracle_table_options: + +Oracle Table Options +------------------------- + +The CREATE TABLE phrase supports the following options with Oracle +in conjunction with the :class:`.Table` construct: + + +* ``ON COMMIT``:: + + Table( + "some_table", metadata, ..., + prefixes=['GLOBAL TEMPORARY'], oracle_on_commit='PRESERVE ROWS') + +.. versionadded:: 1.0.0 + +* ``COMPRESS``:: + + Table('mytable', metadata, Column('data', String(32)), + oracle_compress=True) + + Table('mytable', metadata, Column('data', String(32)), + oracle_compress=6) + + The ``oracle_compress`` parameter accepts either an integer compression + level, or ``True`` to use the default compression level. + +.. versionadded:: 1.0.0 + +.. _oracle_index_options: + +Oracle Specific Index Options +----------------------------- + +Bitmap Indexes +~~~~~~~~~~~~~~ + +You can specify the ``oracle_bitmap`` parameter to create a bitmap index +instead of a B-tree index:: + + Index('my_index', my_table.c.data, oracle_bitmap=True) + +Bitmap indexes cannot be unique and cannot be compressed. SQLAlchemy will not +check for such limitations, only the database will. + +.. versionadded:: 1.0.0 + +Index compression +~~~~~~~~~~~~~~~~~ + +Oracle has a more efficient storage mode for indexes containing lots of +repeated values. Use the ``oracle_compress`` parameter to turn on key c +ompression:: + + Index('my_index', my_table.c.data, oracle_compress=True) + + Index('my_index', my_table.c.data1, my_table.c.data2, unique=True, + oracle_compress=1) + +The ``oracle_compress`` parameter accepts either an integer specifying the +number of prefix columns to compress, or ``True`` to use the default (all +columns for non-unique indexes, all but the last column for unique indexes). + +.. 
versionadded:: 1.0.0 + """ import re from sqlalchemy import util, sql -from sqlalchemy.engine import default, base, reflection +from sqlalchemy.engine import default, reflection from sqlalchemy.sql import compiler, visitors, expression -from sqlalchemy.sql import operators as sql_operators, functions as sql_functions +from sqlalchemy.sql import operators as sql_operators +from sqlalchemy.sql.elements import quoted_name from sqlalchemy import types as sqltypes, schema as sa_schema from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, \ - BLOB, CLOB, TIMESTAMP, FLOAT + BLOB, CLOB, TIMESTAMP, FLOAT RESERVED_WORDS = \ - set('SHARE RAW DROP BETWEEN FROM DESC OPTION PRIOR LONG THEN '\ - 'DEFAULT ALTER IS INTO MINUS INTEGER NUMBER GRANT IDENTIFIED '\ - 'ALL TO ORDER ON FLOAT DATE HAVING CLUSTER NOWAIT RESOURCE '\ - 'ANY TABLE INDEX FOR UPDATE WHERE CHECK SMALLINT WITH DELETE '\ - 'BY ASC REVOKE LIKE SIZE RENAME NOCOMPRESS NULL GROUP VALUES '\ - 'AS IN VIEW EXCLUSIVE COMPRESS SYNONYM SELECT INSERT EXISTS '\ - 'NOT TRIGGER ELSE CREATE INTERSECT PCTFREE DISTINCT USER '\ - 'CONNECT SET MODE OF UNIQUE VARCHAR2 VARCHAR LOCK OR CHAR '\ + set('SHARE RAW DROP BETWEEN FROM DESC OPTION PRIOR LONG THEN ' + 'DEFAULT ALTER IS INTO MINUS INTEGER NUMBER GRANT IDENTIFIED ' + 'ALL TO ORDER ON FLOAT DATE HAVING CLUSTER NOWAIT RESOURCE ' + 'ANY TABLE INDEX FOR UPDATE WHERE CHECK SMALLINT WITH DELETE ' + 'BY ASC REVOKE LIKE SIZE RENAME NOCOMPRESS NULL GROUP VALUES ' + 'AS IN VIEW EXCLUSIVE COMPRESS SYNONYM SELECT INSERT EXISTS ' + 'NOT TRIGGER ELSE CREATE INTERSECT PCTFREE DISTINCT USER ' + 'CONNECT SET MODE OF UNIQUE VARCHAR2 VARCHAR LOCK OR CHAR ' 'DECIMAL UNION PUBLIC AND START UID COMMENT CURRENT LEVEL'.split()) NO_ARG_FNS = set('UID CURRENT_DATE SYSDATE USER ' - 'CURRENT_TIME CURRENT_TIMESTAMP'.split()) + 'CURRENT_TIME CURRENT_TIMESTAMP'.split()) class RAW(sqltypes._Binary): @@ -243,7 +329,8 @@ def __init__(self, precision=None, scale=None, asdecimal=None): if asdecimal is None: asdecimal = bool(scale and scale > 0) - super(NUMBER, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal) + super(NUMBER, self).__init__( + precision=precision, scale=scale, asdecimal=asdecimal) def adapt(self, impltype): ret = super(NUMBER, self).adapt(impltype) @@ -266,7 +353,8 @@ def __init__(self, precision=None, scale=None, asdecimal=None): if asdecimal is None: asdecimal = False - super(DOUBLE_PRECISION, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal) + super(DOUBLE_PRECISION, self).__init__( + precision=precision, scale=scale, asdecimal=asdecimal) class BFILE(sqltypes.LargeBinary): @@ -276,6 +364,7 @@ class BFILE(sqltypes.LargeBinary): class LONG(sqltypes.Text): __visit_name__ = 'LONG' + class DATE(sqltypes.DateTime): """Provide the oracle DATE type. @@ -288,7 +377,6 @@ class DATE(sqltypes.DateTime): """ __visit_name__ = 'DATE' - def _compare_type_affinity(self, other): return other._type_affinity in (sqltypes.DateTime, sqltypes.Date) @@ -297,18 +385,19 @@ class INTERVAL(sqltypes.TypeEngine): __visit_name__ = 'INTERVAL' def __init__(self, - day_precision=None, - second_precision=None): + day_precision=None, + second_precision=None): """Construct an INTERVAL. Note that only DAY TO SECOND intervals are currently supported. This is due to a lack of support for YEAR TO MONTH intervals within available DBAPIs (cx_oracle and zxjdbc). - :param day_precision: the day precision value. this is the number of digits - to store for the day field. 
Defaults to "2" - :param second_precision: the second precision value. this is the number of digits - to store for the fractional seconds field. Defaults to "6". + :param day_precision: the day precision value. this is the number of + digits to store for the day field. Defaults to "2" + :param second_precision: the second precision value. this is the + number of digits to store for the fractional seconds field. + Defaults to "6". """ self.day_precision = day_precision @@ -369,44 +458,44 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): # Oracle does not allow milliseconds in DATE # Oracle does not support TIME columns - def visit_datetime(self, type_): - return self.visit_DATE(type_) + def visit_datetime(self, type_, **kw): + return self.visit_DATE(type_, **kw) - def visit_float(self, type_): - return self.visit_FLOAT(type_) + def visit_float(self, type_, **kw): + return self.visit_FLOAT(type_, **kw) - def visit_unicode(self, type_): + def visit_unicode(self, type_, **kw): if self.dialect._supports_nchar: - return self.visit_NVARCHAR2(type_) + return self.visit_NVARCHAR2(type_, **kw) else: - return self.visit_VARCHAR2(type_) + return self.visit_VARCHAR2(type_, **kw) - def visit_INTERVAL(self, type_): + def visit_INTERVAL(self, type_, **kw): return "INTERVAL DAY%s TO SECOND%s" % ( type_.day_precision is not None and - "(%d)" % type_.day_precision or - "", + "(%d)" % type_.day_precision or + "", type_.second_precision is not None and - "(%d)" % type_.second_precision or - "", + "(%d)" % type_.second_precision or + "", ) - def visit_LONG(self, type_): + def visit_LONG(self, type_, **kw): return "LONG" - def visit_TIMESTAMP(self, type_): + def visit_TIMESTAMP(self, type_, **kw): if type_.timezone: return "TIMESTAMP WITH TIME ZONE" else: return "TIMESTAMP" - def visit_DOUBLE_PRECISION(self, type_): - return self._generate_numeric(type_, "DOUBLE PRECISION") + def visit_DOUBLE_PRECISION(self, type_, **kw): + return self._generate_numeric(type_, "DOUBLE PRECISION", **kw) def visit_NUMBER(self, type_, **kw): return self._generate_numeric(type_, "NUMBER", **kw) - def _generate_numeric(self, type_, name, precision=None, scale=None): + def _generate_numeric(self, type_, name, precision=None, scale=None, **kw): if precision is None: precision = type_.precision @@ -422,17 +511,17 @@ def _generate_numeric(self, type_, name, precision=None, scale=None): n = "%(name)s(%(precision)s, %(scale)s)" return n % {'name': name, 'precision': precision, 'scale': scale} - def visit_string(self, type_): - return self.visit_VARCHAR2(type_) + def visit_string(self, type_, **kw): + return self.visit_VARCHAR2(type_, **kw) - def visit_VARCHAR2(self, type_): + def visit_VARCHAR2(self, type_, **kw): return self._visit_varchar(type_, '', '2') - def visit_NVARCHAR2(self, type_): + def visit_NVARCHAR2(self, type_, **kw): return self._visit_varchar(type_, 'N', '2') visit_NVARCHAR = visit_NVARCHAR2 - def visit_VARCHAR(self, type_): + def visit_VARCHAR(self, type_, **kw): return self._visit_varchar(type_, '', '') def _visit_varchar(self, type_, n, num): @@ -445,31 +534,31 @@ def _visit_varchar(self, type_, n, num): varchar = "%(n)sVARCHAR%(two)s(%(length)s)" return varchar % {'length': type_.length, 'two': num, 'n': n} - def visit_text(self, type_): - return self.visit_CLOB(type_) + def visit_text(self, type_, **kw): + return self.visit_CLOB(type_, **kw) - def visit_unicode_text(self, type_): + def visit_unicode_text(self, type_, **kw): if self.dialect._supports_nchar: - return self.visit_NCLOB(type_) + return 
self.visit_NCLOB(type_, **kw) else: - return self.visit_CLOB(type_) + return self.visit_CLOB(type_, **kw) - def visit_large_binary(self, type_): - return self.visit_BLOB(type_) + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_, **kw) - def visit_big_integer(self, type_): - return self.visit_NUMBER(type_, precision=19) + def visit_big_integer(self, type_, **kw): + return self.visit_NUMBER(type_, precision=19, **kw) - def visit_boolean(self, type_): - return self.visit_SMALLINT(type_) + def visit_boolean(self, type_, **kw): + return self.visit_SMALLINT(type_, **kw) - def visit_RAW(self, type_): + def visit_RAW(self, type_, **kw): if type_.length: return "RAW(%(length)s)" % {'length': type_.length} else: return "RAW" - def visit_ROWID(self, type_): + def visit_ROWID(self, type_, **kw): return "ROWID" @@ -482,7 +571,7 @@ class OracleCompiler(compiler.SQLCompiler): compound_keywords = util.update_copy( compiler.SQLCompiler.compound_keywords, { - expression.CompoundSelect.EXCEPT: 'MINUS' + expression.CompoundSelect.EXCEPT: 'MINUS' } ) @@ -503,7 +592,7 @@ def visit_char_length_func(self, fn, **kw): def visit_match_op_binary(self, binary, operator, **kw): return "CONTAINS (%s, %s)" % (self.process(binary.left), - self.process(binary.right)) + self.process(binary.right)) def visit_true(self, expr, **kw): return '1' @@ -511,6 +600,9 @@ def visit_true(self, expr, **kw): def visit_false(self, expr, **kw): return '0' + def get_cte_preamble(self, recursive): + return "WITH" + def get_select_hint_text(self, byfroms): return " ".join( "/*+ %s */" % text for table, text in byfroms.items() @@ -541,8 +633,7 @@ def visit_join(self, join, **kwargs): else: right = join.right return self.process(join.left, **kwargs) + \ - ", " + self.process(right, **kwargs) - + ", " + self.process(right, **kwargs) def _get_nonansi_join_whereclause(self, froms): clauses = [] @@ -555,8 +646,8 @@ def visit_binary(binary): binary.left = _OuterJoinColumn(binary.left) elif join.right.is_derived_from(binary.right.table): binary.right = _OuterJoinColumn(binary.right) - clauses.append(visitors.cloned_traverse(join.onclause, {}, - {'binary': visit_binary})) + clauses.append(visitors.cloned_traverse( + join.onclause, {}, {'binary': visit_binary})) else: clauses.append(join.onclause) @@ -575,50 +666,47 @@ def visit_binary(binary): else: return sql.and_(*clauses) - def visit_outer_join_column(self, vc): - return self.process(vc.column) + "(+)" + def visit_outer_join_column(self, vc, **kw): + return self.process(vc.column, **kw) + "(+)" def visit_sequence(self, seq): - return self.dialect.identifier_preparer.format_sequence(seq) + ".nextval" + return (self.dialect.identifier_preparer.format_sequence(seq) + + ".nextval") - def visit_alias(self, alias, asfrom=False, ashint=False, **kwargs): - """Oracle doesn't like ``FROM table AS alias``. 
Is the AS standard SQL??""" + def get_render_as_alias_suffix(self, alias_name_text): + """Oracle doesn't like ``FROM table AS alias``""" - if asfrom or ashint: - alias_name = isinstance(alias.name, expression._truncated_label) and \ - self._truncated_identifier("alias", alias.name) or alias.name - - if ashint: - return alias_name - elif asfrom: - return self.process(alias.original, asfrom=asfrom, **kwargs) + \ - " " + self.preparer.format_alias(alias, alias_name) - else: - return self.process(alias.original, **kwargs) + return " " + alias_name_text def returning_clause(self, stmt, returning_cols): columns = [] binds = [] - for i, column in enumerate(expression._select_iterables(returning_cols)): + for i, column in enumerate( + expression._select_iterables(returning_cols)): if column.type._has_column_expression: col_expr = column.type.column_expression(column) else: col_expr = column outparam = sql.outparam("ret_%d" % i, type_=column.type) self.binds[outparam.key] = outparam - binds.append(self.bindparam_string(self._truncate_bindparam(outparam))) - columns.append(self.process(col_expr, within_columns_clause=False)) - self.result_map[outparam.key] = ( - outparam.key, + binds.append( + self.bindparam_string(self._truncate_bindparam(outparam))) + columns.append( + self.process(col_expr, within_columns_clause=False)) + + self._add_to_result_map( + outparam.key, outparam.key, (column, getattr(column, 'name', None), - getattr(column, 'key', None)), + getattr(column, 'key', None)), column.type ) return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds) def _TODO_visit_compound_select(self, select): - """Need to determine how to get ``LIMIT``/``OFFSET`` into a ``UNION`` for Oracle.""" + """Need to determine how to get ``LIMIT``/``OFFSET`` into a + ``UNION`` for Oracle. + """ pass def visit_select(self, select, **kwargs): @@ -629,76 +717,90 @@ def visit_select(self, select, **kwargs): if not getattr(select, '_oracle_visit', None): if not self.dialect.use_ansi: froms = self._display_froms_for_select( - select, kwargs.get('asfrom', False)) + select, kwargs.get('asfrom', False)) whereclause = self._get_nonansi_join_whereclause(froms) if whereclause is not None: select = select.where(whereclause) select._oracle_visit = True - if select._limit is not None or select._offset is not None: - # See http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html + limit_clause = select._limit_clause + offset_clause = select._offset_clause + if limit_clause is not None or offset_clause is not None: + # See http://www.oracle.com/technology/oramag/oracle/06-sep/\ + # o56asktom.html # # Generalized form of an Oracle pagination query: # select ... from ( - # select /*+ FIRST_ROWS(N) */ ...., rownum as ora_rn from ( - # select distinct ... where ... order by ... + # select /*+ FIRST_ROWS(N) */ ...., rownum as ora_rn from + # ( select distinct ... where ... order by ... # ) where ROWNUM <= :limit+:offset # ) where ora_rn > :offset - # Outer select and "ROWNUM as ora_rn" can be dropped if limit=0 + # Outer select and "ROWNUM as ora_rn" can be dropped if + # limit=0 - # TODO: use annotations instead of clone + attr set ? 
+ kwargs['select_wraps_for'] = select select = select._generate() select._oracle_visit = True # Wrap the middle select and add the hint limitselect = sql.select([c for c in select.c]) - if select._limit and self.dialect.optimize_limits: - limitselect = limitselect.prefix_with("/*+ FIRST_ROWS(%d) */" % select._limit) + if limit_clause is not None and \ + self.dialect.optimize_limits and \ + select._simple_int_limit: + limitselect = limitselect.prefix_with( + "/*+ FIRST_ROWS(%d) */" % + select._limit) limitselect._oracle_visit = True limitselect._is_wrapper = True # If needed, add the limiting clause - if select._limit is not None: - max_row = select._limit - if select._offset is not None: - max_row += select._offset + if limit_clause is not None: if not self.dialect.use_binds_for_limits: + # use simple int limits, will raise an exception + # if the limit isn't specified this way + max_row = select._limit + + if offset_clause is not None: + max_row += select._offset max_row = sql.literal_column("%d" % max_row) + else: + max_row = limit_clause + if offset_clause is not None: + max_row = max_row + offset_clause limitselect.append_whereclause( - sql.literal_column("ROWNUM") <= max_row) + sql.literal_column("ROWNUM") <= max_row) # If needed, add the ora_rn, and wrap again with offset. - if select._offset is None: + if offset_clause is None: limitselect._for_update_arg = select._for_update_arg select = limitselect else: limitselect = limitselect.column( - sql.literal_column("ROWNUM").label("ora_rn")) + sql.literal_column("ROWNUM").label("ora_rn")) limitselect._oracle_visit = True limitselect._is_wrapper = True offsetselect = sql.select( - [c for c in limitselect.c if c.key != 'ora_rn']) + [c for c in limitselect.c if c.key != 'ora_rn']) offsetselect._oracle_visit = True offsetselect._is_wrapper = True - offset_value = select._offset if not self.dialect.use_binds_for_limits: - offset_value = sql.literal_column("%d" % offset_value) + offset_clause = sql.literal_column( + "%d" % select._offset) offsetselect.append_whereclause( - sql.literal_column("ora_rn") > offset_value) + sql.literal_column("ora_rn") > offset_clause) offsetselect._for_update_arg = select._for_update_arg select = offsetselect - kwargs['iswrapper'] = getattr(select, '_is_wrapper', False) return compiler.SQLCompiler.visit_select(self, select, **kwargs) - def limit_clause(self, select): + def limit_clause(self, select, **kw): return "" - def for_update_clause(self, select): + def for_update_clause(self, select, **kw): if self.is_subquery(): return "" @@ -706,9 +808,9 @@ def for_update_clause(self, select): if select._for_update_arg.of: tmp += ' OF ' + ', '.join( - self.process(elem) for elem in - select._for_update_arg.of - ) + self.process(elem, **kw) for elem in + select._for_update_arg.of + ) if select._for_update_arg.nowait: tmp += " NOWAIT" @@ -724,24 +826,68 @@ def define_constraint_cascades(self, constraint): text += " ON DELETE %s" % constraint.ondelete # oracle has no ON UPDATE CASCADE - - # its only available via triggers http://asktom.oracle.com/tkyte/update_cascade/index.html + # its only available via triggers + # http://asktom.oracle.com/tkyte/update_cascade/index.html if constraint.onupdate is not None: util.warn( "Oracle does not contain native UPDATE CASCADE " - "functionality - onupdates will not be rendered for foreign keys. " - "Consider using deferrable=True, initially='deferred' or triggers.") + "functionality - onupdates will not be rendered for foreign " + "keys. 
Consider using deferrable=True, initially='deferred' " + "or triggers.") return text - def visit_create_index(self, create, **kw): - return super(OracleDDLCompiler, self).\ - visit_create_index(create, include_schema=True) + def visit_create_index(self, create): + index = create.element + self._verify_index_table(index) + preparer = self.preparer + text = "CREATE " + if index.unique: + text += "UNIQUE " + if index.dialect_options['oracle']['bitmap']: + text += "BITMAP " + text += "INDEX %s ON %s (%s)" % ( + self._prepared_index_name(index, include_schema=True), + preparer.format_table(index.table, use_schema=True), + ', '.join( + self.sql_compiler.process( + expr, + include_table=False, literal_binds=True) + for expr in index.expressions) + ) + if index.dialect_options['oracle']['compress'] is not False: + if index.dialect_options['oracle']['compress'] is True: + text += " COMPRESS" + else: + text += " COMPRESS %d" % ( + index.dialect_options['oracle']['compress'] + ) + return text + + def post_create_table(self, table): + table_opts = [] + opts = table.dialect_options['oracle'] + + if opts['on_commit']: + on_commit_options = opts['on_commit'].replace("_", " ").upper() + table_opts.append('\n ON COMMIT %s' % on_commit_options) + + if opts['compress']: + if opts['compress'] is True: + table_opts.append("\n COMPRESS") + else: + table_opts.append("\n COMPRESS FOR %s" % ( + opts['compress'] + )) + + return ''.join(table_opts) class OracleIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = set([x.lower() for x in RESERVED_WORDS]) - illegal_initial_characters = set(range(0, 10)).union(["_", "$"]) + illegal_initial_characters = set( + (str(dig) for dig in range(0, 10))).union(["_", "$"]) def _bindparam_requires_quotes(self, value): """Return True if the given identifier requires quoting.""" @@ -753,14 +899,16 @@ def _bindparam_requires_quotes(self, value): def format_savepoint(self, savepoint): name = re.sub(r'^_+', '', savepoint.ident) - return super(OracleIdentifierPreparer, self).format_savepoint(savepoint, name) + return super( + OracleIdentifierPreparer, self).format_savepoint(savepoint, name) class OracleExecutionContext(default.DefaultExecutionContext): def fire_sequence(self, seq, type_): - return self._execute_scalar("SELECT " + - self.dialect.identifier_preparer.format_sequence(seq) + - ".nextval FROM DUAL", type_) + return self._execute_scalar( + "SELECT " + + self.dialect.identifier_preparer.format_sequence(seq) + + ".nextval FROM DUAL", type_) class OracleDialect(default.DefaultDialect): @@ -772,6 +920,8 @@ class OracleDialect(default.DefaultDialect): supports_sane_rowcount = True supports_sane_multi_rowcount = False + supports_simple_order_by_label = False + supports_sequences = True sequences_optional = False postfetch_lastrowid = False @@ -793,14 +943,22 @@ class OracleDialect(default.DefaultDialect): reflection_options = ('oracle_resolve_synonyms', ) construct_arguments = [ - (sa_schema.Table, {"resolve_synonyms": False}) + (sa_schema.Table, { + "resolve_synonyms": False, + "on_commit": None, + "compress": False + }), + (sa_schema.Index, { + "bitmap": False, + "compress": False + }) ] def __init__(self, - use_ansi=True, - optimize_limits=False, - use_binds_for_limits=True, - **kwargs): + use_ansi=True, + optimize_limits=False, + use_binds_for_limits=True, + **kwargs): default.DefaultDialect.__init__(self, **kwargs) self.use_ansi = use_ansi self.optimize_limits = optimize_limits @@ -809,9 +967,9 @@ def __init__(self, def initialize(self, connection): 
super(OracleDialect, self).initialize(connection) self.implicit_returning = self.__dict__.get( - 'implicit_returning', - self.server_version_info > (10, ) - ) + 'implicit_returning', + self.server_version_info > (10, ) + ) if self._is_oracle_8: self.colspecs = self.colspecs.copy() @@ -821,7 +979,17 @@ def initialize(self, connection): @property def _is_oracle_8(self): return self.server_version_info and \ - self.server_version_info < (9, ) + self.server_version_info < (9, ) + + @property + def _supports_table_compression(self): + return self.server_version_info and \ + self.server_version_info >= (9, 2, ) + + @property + def _supports_table_compress_for(self): + return self.server_version_info and \ + self.server_version_info >= (11, ) @property def _supports_char_length(self): @@ -841,7 +1009,8 @@ def has_table(self, connection, table_name, schema=None): cursor = connection.execute( sql.text("SELECT table_name FROM all_tables " "WHERE table_name = :name AND owner = :schema_name"), - name=self.denormalize_name(table_name), schema_name=self.denormalize_name(schema)) + name=self.denormalize_name(table_name), + schema_name=self.denormalize_name(schema)) return cursor.first() is not None def has_sequence(self, connection, sequence_name, schema=None): @@ -849,8 +1018,10 @@ def has_sequence(self, connection, sequence_name, schema=None): schema = self.default_schema_name cursor = connection.execute( sql.text("SELECT sequence_name FROM all_sequences " - "WHERE sequence_name = :name AND sequence_owner = :schema_name"), - name=self.denormalize_name(sequence_name), schema_name=self.denormalize_name(schema)) + "WHERE sequence_name = :name AND " + "sequence_owner = :schema_name"), + name=self.denormalize_name(sequence_name), + schema_name=self.denormalize_name(schema)) return cursor.first() is not None def normalize_name(self, name): @@ -859,16 +1030,19 @@ def normalize_name(self, name): if util.py2k: if isinstance(name, str): name = name.decode(self.encoding) - if name.upper() == name and \ - not self.identifier_preparer._requires_quotes(name.lower()): + if name.upper() == name and not \ + self.identifier_preparer._requires_quotes(name.lower()): return name.lower() + elif name.lower() == name: + return quoted_name(name, quote=True) else: return name def denormalize_name(self, name): if name is None: return None - elif name.lower() == name and not self.identifier_preparer._requires_quotes(name.lower()): + elif name.lower() == name and not \ + self.identifier_preparer._requires_quotes(name.lower()): name = name.upper() if util.py2k: if not self.supports_unicode_binds: @@ -878,18 +1052,21 @@ def denormalize_name(self, name): return name def _get_default_schema_name(self, connection): - return self.normalize_name(connection.execute('SELECT USER FROM DUAL').scalar()) + return self.normalize_name( + connection.execute('SELECT USER FROM DUAL').scalar()) - def _resolve_synonym(self, connection, desired_owner=None, desired_synonym=None, desired_table=None): + def _resolve_synonym(self, connection, desired_owner=None, + desired_synonym=None, desired_table=None): """search for a local synonym matching the given desired owner/name. if desired_owner is None, attempts to locate a distinct owner. - returns the actual name, owner, dblink name, and synonym name if found. + returns the actual name, owner, dblink name, and synonym name if + found. 
""" q = "SELECT owner, table_owner, table_name, db_link, "\ - "synonym_name FROM all_synonyms WHERE " + "synonym_name FROM all_synonyms WHERE " clauses = [] params = {} if desired_synonym: @@ -908,16 +1085,20 @@ def _resolve_synonym(self, connection, desired_owner=None, desired_synonym=None, if desired_owner: row = result.first() if row: - return row['table_name'], row['table_owner'], row['db_link'], row['synonym_name'] + return (row['table_name'], row['table_owner'], + row['db_link'], row['synonym_name']) else: return None, None, None, None else: rows = result.fetchall() if len(rows) > 1: - raise AssertionError("There are multiple tables visible to the schema, you must specify owner") + raise AssertionError( + "There are multiple tables visible to the schema, you " + "must specify owner") elif len(rows) == 1: row = rows[0] - return row['table_name'], row['table_owner'], row['db_link'], row['synonym_name'] + return (row['table_name'], row['table_owner'], + row['db_link'], row['synonym_name']) else: return None, None, None, None @@ -927,10 +1108,10 @@ def _prepare_reflection_args(self, connection, table_name, schema=None, if resolve_synonyms: actual_name, owner, dblink, synonym = self._resolve_synonym( - connection, - desired_owner=self.denormalize_name(schema), - desired_synonym=self.denormalize_name(table_name) - ) + connection, + desired_owner=self.denormalize_name(schema), + desired_synonym=self.denormalize_name(table_name) + ) else: actual_name, owner, dblink, synonym = None, None, None, None if not actual_name: @@ -943,8 +1124,8 @@ def _prepare_reflection_args(self, connection, table_name, schema=None, # will need to hear from more users if we are doing # the right thing here. See [ticket:2619] owner = connection.scalar( - sql.text("SELECT username FROM user_db_links " - "WHERE db_link=:link"), link=dblink) + sql.text("SELECT username FROM user_db_links " + "WHERE db_link=:link"), link=dblink) dblink = "@" + dblink elif not owner: owner = self.denormalize_name(schema or self.default_schema_name) @@ -961,14 +1142,29 @@ def get_schema_names(self, connection, **kw): def get_table_names(self, connection, schema=None, **kw): schema = self.denormalize_name(schema or self.default_schema_name) - # note that table_names() isnt loading DBLINKed or synonym'ed tables + # note that table_names() isn't loading DBLINKed or synonym'ed tables if schema is None: schema = self.default_schema_name s = sql.text( "SELECT table_name FROM all_tables " - "WHERE nvl(tablespace_name, 'no tablespace') NOT IN ('SYSTEM', 'SYSAUX') " + "WHERE nvl(tablespace_name, 'no tablespace') NOT IN " + "('SYSTEM', 'SYSAUX') " + "AND OWNER = :owner " + "AND IOT_NAME IS NULL " + "AND DURATION IS NULL") + cursor = connection.execute(s, owner=schema) + return [self.normalize_name(row[0]) for row in cursor] + + @reflection.cache + def get_temp_table_names(self, connection, **kw): + schema = self.denormalize_name(self.default_schema_name) + s = sql.text( + "SELECT table_name FROM all_tables " + "WHERE nvl(tablespace_name, 'no tablespace') NOT IN " + "('SYSTEM', 'SYSAUX') " "AND OWNER = :owner " - "AND IOT_NAME IS NULL") + "AND IOT_NAME IS NULL " + "AND DURATION IS NOT NULL") cursor = connection.execute(s, owner=schema) return [self.normalize_name(row[0]) for row in cursor] @@ -979,6 +1175,50 @@ def get_view_names(self, connection, schema=None, **kw): cursor = connection.execute(s, owner=self.denormalize_name(schema)) return [self.normalize_name(row[0]) for row in cursor] + @reflection.cache + def get_table_options(self, connection, 
table_name, schema=None, **kw): + options = {} + + resolve_synonyms = kw.get('oracle_resolve_synonyms', False) + dblink = kw.get('dblink', '') + info_cache = kw.get('info_cache') + + (table_name, schema, dblink, synonym) = \ + self._prepare_reflection_args(connection, table_name, schema, + resolve_synonyms, dblink, + info_cache=info_cache) + + params = {"table_name": table_name} + + columns = ["table_name"] + if self._supports_table_compression: + columns.append("compression") + if self._supports_table_compress_for: + columns.append("compress_for") + + text = "SELECT %(columns)s "\ + "FROM ALL_TABLES%(dblink)s "\ + "WHERE table_name = :table_name" + + if schema is not None: + params['owner'] = schema + text += " AND owner = :owner " + text = text % {'dblink': dblink, 'columns': ", ".join(columns)} + + result = connection.execute(sql.text(text), **params) + + enabled = dict(DISABLED=False, ENABLED=True) + + row = result.first() + if row: + if "compression" in row and enabled.get(row.compression, False): + if "compress_for" in row: + options['oracle_compress'] = row.compress_for + else: + options['oracle_compress'] = True + + return options + @reflection.cache def get_columns(self, connection, table_name, schema=None, **kw): """ @@ -1007,9 +1247,9 @@ def get_columns(self, connection, table_name, schema=None, **kw): params = {"table_name": table_name} text = "SELECT column_name, data_type, %(char_length_col)s, "\ - "data_precision, data_scale, "\ - "nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s "\ - "WHERE table_name = :table_name" + "data_precision, data_scale, "\ + "nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s "\ + "WHERE table_name = :table_name" if schema is not None: params['owner'] = schema text += " AND owner = :owner " @@ -1020,7 +1260,8 @@ def get_columns(self, connection, table_name, schema=None, **kw): for row in c: (colname, orig_colname, coltype, length, precision, scale, nullable, default) = \ - (self.normalize_name(row[0]), row[0], row[1], row[2], row[3], row[4], row[5] == 'Y', row[6]) + (self.normalize_name(row[0]), row[0], row[1], row[ + 2], row[3], row[4], row[5] == 'Y', row[6]) if coltype == 'NUMBER': coltype = NUMBER(precision, scale) @@ -1063,7 +1304,8 @@ def get_indexes(self, connection, table_name, schema=None, params = {'table_name': table_name} text = \ - "SELECT a.index_name, a.column_name, b.uniqueness "\ + "SELECT a.index_name, a.column_name, "\ + "\nb.index_type, b.uniqueness, b.compression, b.prefix_length "\ "\nFROM ALL_IND_COLUMNS%(dblink)s a, "\ "\nALL_INDEXES%(dblink)s b "\ "\nWHERE "\ @@ -1089,6 +1331,7 @@ def get_indexes(self, connection, table_name, schema=None, dblink=dblink, info_cache=kw.get('info_cache')) pkeys = pk_constraint['constrained_columns'] uniqueness = dict(NONUNIQUE=False, UNIQUE=True) + enabled = dict(DISABLED=False, ENABLED=True) oracle_sys_col = re.compile(r'SYS_NC\d+\$', re.IGNORECASE) @@ -1107,21 +1350,28 @@ def remove_if_primary_key(index): for rset in rp: if rset.index_name != last_index_name: remove_if_primary_key(index) - index = dict(name=self.normalize_name(rset.index_name), column_names=[]) + index = dict(name=self.normalize_name(rset.index_name), + column_names=[], dialect_options={}) indexes.append(index) index['unique'] = uniqueness.get(rset.uniqueness, False) + if rset.index_type in ('BITMAP', 'FUNCTION-BASED BITMAP'): + index['dialect_options']['oracle_bitmap'] = True + if enabled.get(rset.compression, False): + index['dialect_options']['oracle_compress'] = rset.prefix_length + # filter out Oracle SYS_NC 
names. could also do an outer join # to the all_tab_columns table and check for real col names there. if not oracle_sys_col.match(rset.column_name): - index['column_names'].append(self.normalize_name(rset.column_name)) + index['column_names'].append( + self.normalize_name(rset.column_name)) last_index_name = rset.index_name remove_if_primary_key(index) return indexes @reflection.cache def _get_constraint_data(self, connection, table_name, schema=None, - dblink='', **kw): + dblink='', **kw): params = {'table_name': table_name} @@ -1170,9 +1420,9 @@ def get_pk_constraint(self, connection, table_name, schema=None, **kw): info_cache=info_cache) pkeys = [] constraint_name = None - constraint_data = self._get_constraint_data(connection, table_name, - schema, dblink, - info_cache=kw.get('info_cache')) + constraint_data = self._get_constraint_data( + connection, table_name, schema, dblink, + info_cache=kw.get('info_cache')) for row in constraint_data: (cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \ @@ -1205,9 +1455,9 @@ def get_foreign_keys(self, connection, table_name, schema=None, **kw): resolve_synonyms, dblink, info_cache=info_cache) - constraint_data = self._get_constraint_data(connection, table_name, - schema, dblink, - info_cache=kw.get('info_cache')) + constraint_data = self._get_constraint_data( + connection, table_name, schema, dblink, + info_cache=kw.get('info_cache')) def fkey_rec(): return { @@ -1222,7 +1472,7 @@ def fkey_rec(): for row in constraint_data: (cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \ - row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]]) + row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]]) if cons_type == 'R': if remote_table is None: @@ -1235,23 +1485,28 @@ def fkey_rec(): rec = fkeys[cons_name] rec['name'] = cons_name - local_cols, remote_cols = rec['constrained_columns'], rec['referred_columns'] + local_cols, remote_cols = rec[ + 'constrained_columns'], rec['referred_columns'] if not rec['referred_table']: if resolve_synonyms: ref_remote_name, ref_remote_owner, ref_dblink, ref_synonym = \ - self._resolve_synonym( - connection, - desired_owner=self.denormalize_name(remote_owner), - desired_table=self.denormalize_name(remote_table) - ) + self._resolve_synonym( + connection, + desired_owner=self.denormalize_name( + remote_owner), + desired_table=self.denormalize_name( + remote_table) + ) if ref_synonym: remote_table = self.normalize_name(ref_synonym) - remote_owner = self.normalize_name(ref_remote_owner) + remote_owner = self.normalize_name( + ref_remote_owner) rec['referred_table'] = remote_table - if requested_schema is not None or self.denormalize_name(remote_owner) != schema: + if requested_schema is not None or \ + self.denormalize_name(remote_owner) != schema: rec['referred_schema'] = remote_owner local_cols.append(local_column) diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index b8ee90b53e..08a01b6492 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -1,5 +1,6 @@ # oracle/cx_oracle.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,7 +10,8 @@ .. 
dialect:: oracle+cx_oracle :name: cx-Oracle :dbapi: cx_oracle - :connectstring: oracle+cx_oracle://user:pass@host:port/dbname[?key=value&key=value...] + :connectstring: oracle+cx_oracle://user:pass@host:port/dbname\ +[?key=value&key=value...] :url: http://cx-oracle.sourceforge.net/ Additional Connect Arguments @@ -51,21 +53,29 @@ .. versionadded:: 0.8 specific DBAPI types can be excluded from the auto_setinputsizes feature via the exclude_setinputsizes attribute. -* ``mode`` - This is given the string value of SYSDBA or SYSOPER, or alternatively - an integer value. This value is only available as a URL query string - argument. +* ``mode`` - This is given the string value of SYSDBA or SYSOPER, or + alternatively an integer value. This value is only available as a URL query + string argument. -* ``threaded`` - enable multithreaded access to cx_oracle connections. Defaults - to ``True``. Note that this is the opposite default of the cx_Oracle DBAPI - itself. +* ``threaded`` - enable multithreaded access to cx_oracle connections. + Defaults to ``True``. Note that this is the opposite default of the + cx_Oracle DBAPI itself. + +* ``service_name`` - An option to use connection string (DSN) with + ``SERVICE_NAME`` instead of ``SID``. It can't be passed when a ``database`` + part is given. + E.g. ``oracle+cx_oracle://scott:tiger@host:1521/?service_name=hr`` + is a valid url. This value is only available as a URL query string argument. + + .. versionadded:: 1.0.0 .. _cx_oracle_unicode: Unicode ------- -The cx_Oracle DBAPI as of version 5 fully supports unicode, and has the ability -to return string results as Python unicode objects natively. +The cx_Oracle DBAPI as of version 5 fully supports unicode, and has the +ability to return string results as Python unicode objects natively. When used in Python 3, cx_Oracle returns all strings as Python unicode objects (that is, plain ``str`` in Python 3). In Python 2, it will return as Python @@ -73,37 +83,39 @@ column values that are of type ``VARCHAR`` or other non-unicode string types, it will return values as Python strings (e.g. bytestrings). -The cx_Oracle SQLAlchemy dialect presents two different options for the use case of -returning ``VARCHAR`` column values as Python unicode objects under Python 2: +The cx_Oracle SQLAlchemy dialect presents two different options for the use +case of returning ``VARCHAR`` column values as Python unicode objects under +Python 2: * the cx_Oracle DBAPI has the ability to coerce all string results to Python unicode objects unconditionally using output type handlers. This has the advantage that the unicode conversion is global to all statements at the cx_Oracle driver level, meaning it works with raw textual SQL statements that have no typing information associated. However, this system - has been observed to incur signfiicant performance overhead, not only because - it takes effect for all string values unconditionally, but also because cx_Oracle under - Python 2 seems to use a pure-Python function call in order to do the - decode operation, which under cPython can orders of magnitude slower - than doing it using C functions alone. - -* SQLAlchemy has unicode-decoding services built in, and when using SQLAlchemy's - C extensions, these functions do not use any Python function calls and - are very fast. 
The disadvantage to this approach is that the unicode - conversion only takes effect for statements where the :class:`.Unicode` type - or :class:`.String` type with ``convert_unicode=True`` is explicitly - associated with the result column. This is the case for any ORM or Core - query or SQL expression as well as for a :func:`.text` construct that specifies - output column types, so in the vast majority of cases this is not an issue. - However, when sending a completely raw string to :meth:`.Connection.execute`, - this typing information isn't present, unless the string is handled - within a :func:`.text` construct that adds typing information. + has been observed to incur signfiicant performance overhead, not only + because it takes effect for all string values unconditionally, but also + because cx_Oracle under Python 2 seems to use a pure-Python function call in + order to do the decode operation, which under cPython can orders of + magnitude slower than doing it using C functions alone. + +* SQLAlchemy has unicode-decoding services built in, and when using + SQLAlchemy's C extensions, these functions do not use any Python function + calls and are very fast. The disadvantage to this approach is that the + unicode conversion only takes effect for statements where the + :class:`.Unicode` type or :class:`.String` type with + ``convert_unicode=True`` is explicitly associated with the result column. + This is the case for any ORM or Core query or SQL expression as well as for + a :func:`.text` construct that specifies output column types, so in the vast + majority of cases this is not an issue. However, when sending a completely + raw string to :meth:`.Connection.execute`, this typing information isn't + present, unless the string is handled within a :func:`.text` construct that + adds typing information. As of version 0.9.2 of SQLAlchemy, the default approach is to use SQLAlchemy's typing system. This keeps cx_Oracle's expensive Python 2 approach -disabled unless the user explicitly wants it. Under Python 3, SQLAlchemy detects -that cx_Oracle is returning unicode objects natively and cx_Oracle's system -is used. +disabled unless the user explicitly wants it. Under Python 3, SQLAlchemy +detects that cx_Oracle is returning unicode objects natively and cx_Oracle's +system is used. To re-enable cx_Oracle's output type handler under Python 2, the ``coerce_to_unicode=True`` flag (new in 0.9.4) can be passed to @@ -116,12 +128,13 @@ the :func:`.text` feature can be used:: from sqlalchemy import text, Unicode - result = conn.execute(text("select username from user").columns(username=Unicode)) + result = conn.execute( + text("select username from user").columns(username=Unicode)) -.. versionchanged:: 0.9.2 cx_Oracle's outputtypehandlers are no longer used for - unicode results of non-unicode datatypes in Python 2, after they were identified as a major - performance bottleneck. SQLAlchemy's own unicode facilities are used - instead. +.. versionchanged:: 0.9.2 cx_Oracle's outputtypehandlers are no longer used + for unicode results of non-unicode datatypes in Python 2, after they were + identified as a major performance bottleneck. SQLAlchemy's own unicode + facilities are used instead. .. versionadded:: 0.9.4 Added the ``coerce_to_unicode`` flag, to re-enable cx_Oracle's outputtypehandler and revert to pre-0.9.2 behavior. @@ -131,38 +144,43 @@ RETURNING Support ----------------- -The cx_oracle DBAPI supports a limited subset of Oracle's already limited RETURNING support. 
-Typically, results can only be guaranteed for at most one column being returned; -this is the typical case when SQLAlchemy uses RETURNING to get just the value of a -primary-key-associated sequence value. Additional column expressions will -cause problems in a non-determinative way, due to cx_oracle's lack of support for -the OCI_DATA_AT_EXEC API which is required for more complex RETURNING scenarios. +The cx_oracle DBAPI supports a limited subset of Oracle's already limited +RETURNING support. Typically, results can only be guaranteed for at most one +column being returned; this is the typical case when SQLAlchemy uses RETURNING +to get just the value of a primary-key-associated sequence value. +Additional column expressions will cause problems in a non-determinative way, +due to cx_oracle's lack of support for the OCI_DATA_AT_EXEC API which is +required for more complex RETURNING scenarios. -For this reason, stability may be enhanced by disabling RETURNING support completely; -SQLAlchemy otherwise will use RETURNING to fetch newly sequence-generated -primary keys. As illustrated in :ref:`oracle_returning`:: +For this reason, stability may be enhanced by disabling RETURNING support +completely; SQLAlchemy otherwise will use RETURNING to fetch newly +sequence-generated primary keys. As illustrated in :ref:`oracle_returning`:: - engine = create_engine("oracle://scott:tiger@dsn", implicit_returning=False) + engine = create_engine("oracle://scott:tiger@dsn", + implicit_returning=False) .. seealso:: - http://docs.oracle.com/cd/B10501_01/appdev.920/a96584/oci05bnd.htm#420693 - OCI documentation for RETURNING + http://docs.oracle.com/cd/B10501_01/appdev.920/a96584/oci05bnd.htm#420693 + - OCI documentation for RETURNING - http://sourceforge.net/mailarchive/message.php?msg_id=31338136 - cx_oracle developer commentary + http://sourceforge.net/mailarchive/message.php?msg_id=31338136 + - cx_oracle developer commentary .. _cx_oracle_lob: LOB Objects ----------- -cx_oracle returns oracle LOBs using the cx_oracle.LOB object. SQLAlchemy converts -these to strings so that the interface of the Binary type is consistent with that of -other backends, and so that the linkage to a live cursor is not needed in scenarios -like result.fetchmany() and result.fetchall(). This means that by default, LOB -objects are fully fetched unconditionally by SQLAlchemy, and the linkage to a live -cursor is broken. +cx_oracle returns oracle LOBs using the cx_oracle.LOB object. SQLAlchemy +converts these to strings so that the interface of the Binary type is +consistent with that of other backends, and so that the linkage to a live +cursor is not needed in scenarios like result.fetchmany() and +result.fetchall(). This means that by default, LOB objects are fully fetched +unconditionally by SQLAlchemy, and the linkage to a live cursor is broken. -To disable this processing, pass ``auto_convert_lobs=False`` to :func:`.create_engine()`. +To disable this processing, pass ``auto_convert_lobs=False`` to +:func:`.create_engine()`. Two Phase Transaction Support ----------------------------- @@ -255,7 +273,7 @@ locale. Under OCI_, this is controlled by the NLS_LANG environment variable. Upon first connection, the dialect runs a test to determine the current "decimal" character, which can be -a comma "," for european locales. From that point forward the +a comma "," for European locales. From that point forward the outputtypehandler uses that character to represent a decimal point. 
Note that cx_oracle 5.0.3 or greater is required when dealing with numerics with locale settings that don't use @@ -275,6 +293,7 @@ from . import base as oracle from ...engine import result as _result from sqlalchemy import types as sqltypes, util, exc, processors +from sqlalchemy import util import random import collections import decimal @@ -313,7 +332,7 @@ def to_decimal(value): if self.precision is None and self.scale is None: return processors.to_float elif not getattr(self, '_is_oracle_number', False) \ - and self.scale is not None: + and self.scale is not None: return processors.to_float else: return None @@ -321,7 +340,7 @@ def to_decimal(value): # cx_oracle 4 behavior, will assume # floats return super(_OracleNumeric, self).\ - result_processor(dialect, coltype) + result_processor(dialect, coltype) class _OracleDate(sqltypes.Date): @@ -362,7 +381,8 @@ def process(value): return unicode(value) return process else: - return super(_NativeUnicodeMixin, self).bind_processor(dialect) + return super( + _NativeUnicodeMixin, self).bind_processor(dialect) # we apply a connection output handler that returns # unicode in all cases, so the "native_unicode" flag @@ -391,11 +411,13 @@ class _OracleLong(oracle.LONG): def get_dbapi_type(self, dbapi): return dbapi.LONG_STRING + class _OracleString(_NativeUnicodeMixin, sqltypes.String): pass -class _OracleUnicodeText(_LOBMixin, _NativeUnicodeMixin, sqltypes.UnicodeText): +class _OracleUnicodeText( + _LOBMixin, _NativeUnicodeMixin, sqltypes.UnicodeText): def get_dbapi_type(self, dbapi): return dbapi.NCLOB @@ -404,7 +426,8 @@ def result_processor(self, dialect, coltype): if lob_processor is None: return None - string_processor = sqltypes.UnicodeText.result_processor(self, dialect, coltype) + string_processor = sqltypes.UnicodeText.result_processor( + self, dialect, coltype) if string_processor is None: return lob_processor @@ -449,7 +472,7 @@ class OracleCompiler_cx_oracle(OracleCompiler): def bindparam_string(self, name, **kw): quote = getattr(name, 'quote', None) if quote is True or quote is not False and \ - self.preparer._bindparam_requires_quotes(name): + self.preparer._bindparam_requires_quotes(name): quoted_name = '"%s"' % name self._quoted_bind_names[name] = quoted_name return OracleCompiler.bindparam_string(self, quoted_name, **kw) @@ -469,12 +492,12 @@ def pre_exec(self): # here. so convert names in quoted_bind_names # to encoded as well. 
quoted_bind_names = \ - dict( - (fromname.encode(self.dialect.encoding), - toname.encode(self.dialect.encoding)) - for fromname, toname in - quoted_bind_names.items() - ) + dict( + (fromname.encode(self.dialect.encoding), + toname.encode(self.dialect.encoding)) + for fromname, toname in + quoted_bind_names.items() + ) for param in self.parameters: for fromname, toname in quoted_bind_names.items(): param[toname] = param[fromname] @@ -484,29 +507,30 @@ def pre_exec(self): # cx_oracle really has issues when you setinputsizes # on String, including that outparams/RETURNING # breaks for varchars - self.set_input_sizes(quoted_bind_names, - exclude_types=self.dialect.exclude_setinputsizes - ) + self.set_input_sizes( + quoted_bind_names, + exclude_types=self.dialect.exclude_setinputsizes + ) # if a single execute, check for outparams if len(self.compiled_parameters) == 1: for bindparam in self.compiled.binds.values(): if bindparam.isoutparam: dbtype = bindparam.type.dialect_impl(self.dialect).\ - get_dbapi_type(self.dialect.dbapi) + get_dbapi_type(self.dialect.dbapi) if not hasattr(self, 'out_parameters'): self.out_parameters = {} if dbtype is None: raise exc.InvalidRequestError( - "Cannot create out parameter for parameter " - "%r - it's type %r is not supported by" - " cx_oracle" % - (bindparam.key, bindparam.type) - ) + "Cannot create out parameter for parameter " + "%r - its type %r is not supported by" + " cx_oracle" % + (bindparam.key, bindparam.type) + ) name = self.compiled.bind_names[bindparam] self.out_parameters[name] = self.cursor.var(dbtype) self.parameters[0][quoted_bind_names.get(name, name)] = \ - self.out_parameters[name] + self.out_parameters[name] def create_cursor(self): c = self._dbapi_connection.cursor() @@ -518,9 +542,9 @@ def create_cursor(self): def get_result_proxy(self): if hasattr(self, 'out_parameters') and self.compiled.returning: returning_params = dict( - (k, v.getvalue()) - for k, v in self.out_parameters.items() - ) + (k, v.getvalue()) + for k, v in self.out_parameters.items() + ) return ReturningResultProxy(self, returning_params) result = None @@ -542,25 +566,29 @@ def get_result_proxy(self): if name in self.out_parameters: type = bind.type impl_type = type.dialect_impl(self.dialect) - dbapi_type = impl_type.get_dbapi_type(self.dialect.dbapi) + dbapi_type = impl_type.get_dbapi_type( + self.dialect.dbapi) result_processor = impl_type.\ - result_processor(self.dialect, - dbapi_type) + result_processor(self.dialect, + dbapi_type) if result_processor is not None: out_parameters[name] = \ - result_processor(self.out_parameters[name].getvalue()) + result_processor( + self.out_parameters[name].getvalue()) else: - out_parameters[name] = self.out_parameters[name].getvalue() + out_parameters[name] = self.out_parameters[ + name].getvalue() else: result.out_parameters = dict( (k, v.getvalue()) - for k, v in self.out_parameters.items() - ) + for k, v in self.out_parameters.items() + ) return result -class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_oracle): +class OracleExecutionContext_cx_oracle_with_unicode( + OracleExecutionContext_cx_oracle): """Support WITH_UNICODE in Python 2.xx. WITH_UNICODE allows cx_Oracle's Python 3 unicode handling @@ -573,17 +601,19 @@ class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_or passed as Python unicode objects. 
""" + def __init__(self, *arg, **kw): OracleExecutionContext_cx_oracle.__init__(self, *arg, **kw) self.statement = util.text_type(self.statement) def _execute_scalar(self, stmt): return super(OracleExecutionContext_cx_oracle_with_unicode, self).\ - _execute_scalar(util.text_type(stmt)) + _execute_scalar(util.text_type(stmt)) class ReturningResultProxy(_result.FullyBufferedResultProxy): - """Result proxy which stuffs the _returning clause + outparams into the fetch.""" + """Result proxy which stuffs the _returning clause + outparams + into the fetch.""" def __init__(self, context, returning_params): self._returning_params = returning_params @@ -597,8 +627,10 @@ def _cursor_description(self): ] def _buffer_rows(self): - return collections.deque([tuple(self._returning_params["ret_%d" % i] - for i, c in enumerate(self._returning_params))]) + return collections.deque( + [tuple(self._returning_params["ret_%d" % i] + for i, c in enumerate(self._returning_params))] + ) class OracleDialect_cx_oracle(OracleDialect): @@ -609,7 +641,8 @@ class OracleDialect_cx_oracle(OracleDialect): colspecs = colspecs = { sqltypes.Numeric: _OracleNumeric, - sqltypes.Date: _OracleDate, # generic type, assume datetime.date is desired + # generic type, assume datetime.date is desired + sqltypes.Date: _OracleDate, sqltypes.LargeBinary: _OracleBinary, sqltypes.Boolean: oracle._OracleBoolean, sqltypes.Interval: _OracleInterval, @@ -636,50 +669,50 @@ class OracleDialect_cx_oracle(OracleDialect): execute_sequence_format = list def __init__(self, - auto_setinputsizes=True, - exclude_setinputsizes=("STRING", "UNICODE"), - auto_convert_lobs=True, - threaded=True, - allow_twophase=True, - coerce_to_decimal=True, - coerce_to_unicode=False, - arraysize=50, **kwargs): + auto_setinputsizes=True, + exclude_setinputsizes=("STRING", "UNICODE"), + auto_convert_lobs=True, + threaded=True, + allow_twophase=True, + coerce_to_decimal=True, + coerce_to_unicode=False, + arraysize=50, **kwargs): OracleDialect.__init__(self, **kwargs) self.threaded = threaded self.arraysize = arraysize self.allow_twophase = allow_twophase self.supports_timestamp = self.dbapi is None or \ - hasattr(self.dbapi, 'TIMESTAMP') + hasattr(self.dbapi, 'TIMESTAMP') self.auto_setinputsizes = auto_setinputsizes self.auto_convert_lobs = auto_convert_lobs if hasattr(self.dbapi, 'version'): self.cx_oracle_ver = tuple([int(x) for x in - self.dbapi.version.split('.')]) + self.dbapi.version.split('.')]) else: self.cx_oracle_ver = (0, 0, 0) def types(*names): return set( - getattr(self.dbapi, name, None) for name in names - ).difference([None]) + getattr(self.dbapi, name, None) for name in names + ).difference([None]) self.exclude_setinputsizes = types(*(exclude_setinputsizes or ())) self._cx_oracle_string_types = types("STRING", "UNICODE", - "NCLOB", "CLOB") + "NCLOB", "CLOB") self._cx_oracle_unicode_types = types("UNICODE", "NCLOB") self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB") self.supports_unicode_binds = self.cx_oracle_ver >= (5, 0) self.coerce_to_unicode = ( - self.cx_oracle_ver >= (5, 0) and - coerce_to_unicode - ) + self.cx_oracle_ver >= (5, 0) and + coerce_to_unicode + ) self.supports_native_decimal = ( - self.cx_oracle_ver >= (5, 0) and - coerce_to_decimal - ) + self.cx_oracle_ver >= (5, 0) and + coerce_to_decimal + ) self._cx_oracle_native_nvarchar = self.cx_oracle_ver >= (5, 0) @@ -687,7 +720,10 @@ def types(*names): # this occurs in tests with mock DBAPIs self._cx_oracle_string_types = set() self._cx_oracle_with_unicode = False - elif 
self.cx_oracle_ver >= (5,) and not hasattr(self.dbapi, 'UNICODE'): + elif util.py3k or ( + self.cx_oracle_ver >= (5,) and not \ + hasattr(self.dbapi, 'UNICODE') + ): # cx_Oracle WITH_UNICODE mode. *only* python # unicode objects accepted for anything self.supports_unicode_statements = True @@ -695,32 +731,32 @@ def types(*names): self._cx_oracle_with_unicode = True if util.py2k: - # There's really no reason to run with WITH_UNICODE under Python 2.x. - # Give the user a hint. + # There's really no reason to run with WITH_UNICODE under + # Python 2.x. Give the user a hint. util.warn( "cx_Oracle is compiled under Python 2.xx using the " "WITH_UNICODE flag. Consider recompiling cx_Oracle " - "without this flag, which is in no way necessary for full " - "support of Unicode. Otherwise, all string-holding bind " - "parameters must be explicitly typed using SQLAlchemy's " - "String type or one of its subtypes," + "without this flag, which is in no way necessary for " + "full support of Unicode. Otherwise, all string-holding " + "bind parameters must be explicitly typed using " + "SQLAlchemy's String type or one of its subtypes," "or otherwise be passed as Python unicode. " "Plain Python strings passed as bind parameters will be " "silently corrupted by cx_Oracle." - ) + ) self.execution_ctx_cls = \ - OracleExecutionContext_cx_oracle_with_unicode + OracleExecutionContext_cx_oracle_with_unicode else: self._cx_oracle_with_unicode = False if self.cx_oracle_ver is None or \ - not self.auto_convert_lobs or \ - not hasattr(self.dbapi, 'CLOB'): + not self.auto_convert_lobs or \ + not hasattr(self.dbapi, 'CLOB'): self.dbapi_type_map = {} else: # only use this for LOB objects. using it for strings, dates - # etc. leads to a little too much magic, reflection doesn't know if it should - # expect encoded strings or unicodes, etc. + # etc. leads to a little too much magic, reflection doesn't know + # if it should expect encoded strings or unicodes, etc. self.dbapi_type_map = { self.dbapi.CLOB: oracle.CLOB(), self.dbapi.NCLOB: oracle.NCLOB(), @@ -741,7 +777,7 @@ def initialize(self, connection): def _detect_decimal_char(self, connection): """detect if the decimal separator character is not '.', as - is the case with european locale settings for NLS_LANG. + is the case with European locale settings for NLS_LANG. cx_oracle itself uses similar logic when it formats Python Decimal objects to strings on the bind side (as of 5.0.3), @@ -763,8 +799,8 @@ def _detect_decimal_char(self, connection): def output_type_handler(cursor, name, defaultType, size, precision, scale): return cursor.var( - cx_Oracle.STRING, - 255, arraysize=cursor.arraysize) + cx_Oracle.STRING, + 255, arraysize=cursor.arraysize) cursor = conn.cursor() cursor.outputtypehandler = output_type_handler @@ -795,17 +831,17 @@ def on_connect(self): cx_Oracle = self.dbapi def output_type_handler(cursor, name, defaultType, - size, precision, scale): + size, precision, scale): # convert all NUMBER with precision + positive scale to Decimal # this almost allows "native decimal" mode. if self.supports_native_decimal and \ defaultType == cx_Oracle.NUMBER and \ precision and scale > 0: return cursor.var( - cx_Oracle.STRING, - 255, - outconverter=self._to_decimal, - arraysize=cursor.arraysize) + cx_Oracle.STRING, + 255, + outconverter=self._to_decimal, + arraysize=cursor.arraysize) # if NUMBER with zero precision and 0 or neg scale, this appears # to indicate "ambiguous". 
Use a slower converter that will # make a decision based on each value received - the type @@ -815,10 +851,10 @@ def output_type_handler(cursor, name, defaultType, defaultType == cx_Oracle.NUMBER \ and not precision and scale <= 0: return cursor.var( - cx_Oracle.STRING, - 255, - outconverter=self._detect_decimal, - arraysize=cursor.arraysize) + cx_Oracle.STRING, + 255, + outconverter=self._detect_decimal, + arraysize=cursor.arraysize) # allow all strings to come back natively as Unicode elif self.coerce_to_unicode and \ defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR): @@ -837,25 +873,41 @@ def create_connect_args(self, url): util.coerce_kw_type(dialect_opts, opt, bool) setattr(self, opt, dialect_opts[opt]) - if url.database: + database = url.database + service_name = dialect_opts.get('service_name', None) + if database or service_name: # if we have a database, then we have a remote host port = url.port if port: port = int(port) else: port = 1521 - dsn = self.dbapi.makedsn(url.host, port, url.database) + + if database and service_name: + raise exc.InvalidRequestError( + '"service_name" option shouldn\'t ' + 'be used with a "database" part of the url') + if database: + makedsn_kwargs = {'sid': database} + if service_name: + makedsn_kwargs = {'service_name': service_name} + + dsn = self.dbapi.makedsn(url.host, port, **makedsn_kwargs) else: # we have a local tnsname dsn = url.host opts = dict( - user=url.username, - password=url.password, - dsn=dsn, threaded=self.threaded, twophase=self.allow_twophase, - ) + ) + + if dsn is not None: + opts['dsn'] = dsn + if url.password is not None: + opts['password'] = url.password + if url.username is not None: + opts['user'] = url.username if util.py2k: if self._cx_oracle_with_unicode: @@ -881,9 +933,9 @@ def create_connect_args(self, url): def _get_server_version_info(self, connection): return tuple( - int(x) - for x in connection.connection.version.split('.') - ) + int(x) + for x in connection.connection.version.split('.') + ) def is_disconnect(self, e, connection, cursor): error, = e.args @@ -923,11 +975,11 @@ def do_prepare_twophase(self, connection, xid): connection.info['cx_oracle_prepared'] = result def do_rollback_twophase(self, connection, xid, is_prepared=True, - recover=False): + recover=False): self.do_rollback(connection.connection) def do_commit_twophase(self, connection, xid, is_prepared=True, - recover=False): + recover=False): if not is_prepared: self.do_commit(connection.connection) else: diff --git a/lib/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/sqlalchemy/dialects/oracle/zxjdbc.py index 710645b237..c3259feae0 100644 --- a/lib/sqlalchemy/dialects/oracle/zxjdbc.py +++ b/lib/sqlalchemy/dialects/oracle/zxjdbc.py @@ -1,5 +1,6 @@ # oracle/zxjdbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,7 +10,10 @@ :name: zxJDBC for Jython :dbapi: zxjdbc :connectstring: oracle+zxjdbc://user:pass@host/dbname - :driverurl: http://www.oracle.com/technology/software/tech/java/sqlj_jdbc/index.html. + :driverurl: http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html + + .. note:: Jython is not supported by current versions of SQLAlchemy. The + zxjdbc dialect should be considered as experimental. 
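
For reference, connecting goes through the standard :func:`.create_engine`
call using the connect string format shown above; the host, SID and
credentials below are hypothetical, and per the note above the dialect
itself should be treated as experimental::

    from sqlalchemy import create_engine

    # requires Jython with the Oracle JDBC driver on the CLASSPATH
    engine = create_engine("oracle+zxjdbc://scott:tiger@localhost/xe")
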
""" import decimal @@ -17,7 +21,9 @@ from sqlalchemy import sql, types as sqltypes, util from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector -from sqlalchemy.dialects.oracle.base import OracleCompiler, OracleDialect, OracleExecutionContext +from sqlalchemy.dialects.oracle.base import (OracleCompiler, + OracleDialect, + OracleExecutionContext) from sqlalchemy.engine import result as _result from sqlalchemy.sql import expression import collections @@ -39,7 +45,7 @@ def process(value): class _ZxJDBCNumeric(sqltypes.Numeric): def result_processor(self, dialect, coltype): - #XXX: does the dialect return Decimal or not??? + # XXX: does the dialect return Decimal or not??? # if it does (in all cases), we could use a None processor as well as # the to_float generic processor if self.asdecimal: @@ -60,10 +66,11 @@ def process(value): class OracleCompiler_zxjdbc(OracleCompiler): def returning_clause(self, stmt, returning_cols): - self.returning_cols = list(expression._select_iterables(returning_cols)) + self.returning_cols = list( + expression._select_iterables(returning_cols)) # within_columns_clause=False so that labels (foo AS bar) don't render - columns = [self.process(c, within_columns_clause=False, result_map=self.result_map) + columns = [self.process(c, within_columns_clause=False) for c in self.returning_cols] if not hasattr(self, 'returning_parameters'): @@ -71,12 +78,15 @@ def returning_clause(self, stmt, returning_cols): binds = [] for i, col in enumerate(self.returning_cols): - dbtype = col.type.dialect_impl(self.dialect).get_dbapi_type(self.dialect.dbapi) + dbtype = col.type.dialect_impl( + self.dialect).get_dbapi_type(self.dialect.dbapi) self.returning_parameters.append((i + 1, dbtype)) - bindparam = sql.bindparam("ret_%d" % i, value=ReturningParam(dbtype)) + bindparam = sql.bindparam( + "ret_%d" % i, value=ReturningParam(dbtype)) self.binds[bindparam.key] = bindparam - binds.append(self.bindparam_string(self._truncate_bindparam(bindparam))) + binds.append( + self.bindparam_string(self._truncate_bindparam(bindparam))) return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds) @@ -97,13 +107,17 @@ def get_result_proxy(self): rrs = self.statement.__statement__.getReturnResultSet() next(rrs) except SQLException as sqle: - msg = '%s [SQLCode: %d]' % (sqle.getMessage(), sqle.getErrorCode()) + msg = '%s [SQLCode: %d]' % ( + sqle.getMessage(), sqle.getErrorCode()) if sqle.getSQLState() is not None: msg += ' [SQLState: %s]' % sqle.getSQLState() raise zxJDBC.Error(msg) else: - row = tuple(self.cursor.datahandler.getPyObject(rrs, index, dbtype) - for index, dbtype in self.compiled.returning_parameters) + row = tuple( + self.cursor.datahandler.getPyObject( + rrs, index, dbtype) + for index, dbtype in + self.compiled.returning_parameters) return ReturningResultProxy(self, row) finally: if rrs is not None: @@ -164,8 +178,8 @@ def __ne__(self, other): def __repr__(self): kls = self.__class__ - return '<%s.%s object at 0x%x type=%s>' % (kls.__module__, kls.__name__, id(self), - self.type) + return '<%s.%s object at 0x%x type=%s>' % ( + kls.__module__, kls.__name__, id(self), self.type) class OracleDialect_zxjdbc(ZxJDBCConnector, OracleDialect): @@ -206,13 +220,16 @@ def setJDBCObject(self, statement, index, object, dbtype=None): def initialize(self, connection): super(OracleDialect_zxjdbc, self).initialize(connection) - self.implicit_returning = connection.connection.driverversion >= '10.2' + self.implicit_returning = \ + connection.connection.driverversion >= '10.2' def 
_create_jdbc_url(self, url): - return 'jdbc:oracle:thin:@%s:%s:%s' % (url.host, url.port or 1521, url.database) + return 'jdbc:oracle:thin:@%s:%s:%s' % ( + url.host, url.port or 1521, url.database) def _get_server_version_info(self, connection): - version = re.search(r'Release ([\d\.]+)', connection.connection.dbversion).group(1) + version = re.search( + r'Release ([\d\.]+)', connection.connection.dbversion).group(1) return tuple(int(x) for x in version.split('.')) dialect = OracleDialect_zxjdbc diff --git a/lib/sqlalchemy/dialects/postgres.py b/lib/sqlalchemy/dialects/postgres.py index 6ed7e18bc8..04d37a2e6e 100644 --- a/lib/sqlalchemy/dialects/postgres.py +++ b/lib/sqlalchemy/dialects/postgres.py @@ -1,5 +1,6 @@ # dialects/postgres.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,9 +9,10 @@ from sqlalchemy.util import warn_deprecated warn_deprecated( - "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'. " - "The new URL format is postgresql[+driver]://:@/" - ) + "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to " + "'postgresql'. The new URL format is " + "postgresql[+driver]://:@/" +) from sqlalchemy.dialects.postgresql import * from sqlalchemy.dialects.postgresql import base diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py index 180e9fc7e4..006afbdd9c 100644 --- a/lib/sqlalchemy/dialects/postgresql/__init__.py +++ b/lib/sqlalchemy/dialects/postgresql/__init__.py @@ -1,29 +1,31 @@ # postgresql/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from . import base, psycopg2, pg8000, pypostgresql, zxjdbc +from . 
import base, psycopg2, pg8000, pypostgresql, zxjdbc, psycopg2cffi base.dialect = psycopg2.dialect from .base import \ INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \ - INET, CIDR, UUID, BIT, MACADDR, DOUBLE_PRECISION, TIMESTAMP, TIME, \ + INET, CIDR, UUID, BIT, MACADDR, OID, DOUBLE_PRECISION, TIMESTAMP, TIME, \ DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All, \ - TSVECTOR + TSVECTOR, DropEnumType from .constraints import ExcludeConstraint from .hstore import HSTORE, hstore -from .json import JSON, JSONElement +from .json import JSON, JSONElement, JSONB from .ranges import INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, \ TSTZRANGE __all__ = ( 'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC', - 'FLOAT', 'REAL', 'INET', 'CIDR', 'UUID', 'BIT', 'MACADDR', + 'FLOAT', 'REAL', 'INET', 'CIDR', 'UUID', 'BIT', 'MACADDR', 'OID', 'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN', 'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'Any', 'All', 'array', 'HSTORE', 'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE', - 'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONElement' + 'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'JSONElement', + 'DropEnumType' ) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index f69a6e0108..8c676a39c5 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1,5 +1,6 @@ # postgresql/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -47,24 +48,28 @@ --------------------------- All Postgresql dialects support setting of transaction isolation level -both via a dialect-specific parameter ``isolation_level`` +both via a dialect-specific parameter :paramref:`.create_engine.isolation_level` accepted by :func:`.create_engine`, -as well as the ``isolation_level`` argument as passed to :meth:`.Connection.execution_options`. -When using a non-psycopg2 dialect, this feature works by issuing the -command ``SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL -`` for each new connection. +as well as the :paramref:`.Connection.execution_options.isolation_level` argument as passed to +:meth:`.Connection.execution_options`. When using a non-psycopg2 dialect, +this feature works by issuing the command +``SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL `` for +each new connection. For the special AUTOCOMMIT isolation level, DBAPI-specific +techniques are used. To set isolation level using :func:`.create_engine`:: engine = create_engine( - "postgresql+pg8000://scott:tiger@localhost/test", - isolation_level="READ UNCOMMITTED" - ) + "postgresql+pg8000://scott:tiger@localhost/test", + isolation_level="READ UNCOMMITTED" + ) To set using per-connection execution options:: connection = engine.connect() - connection = connection.execution_options(isolation_level="READ COMMITTED") + connection = connection.execution_options( + isolation_level="READ COMMITTED" + ) Valid values for ``isolation_level`` include: @@ -72,9 +77,13 @@ * ``READ UNCOMMITTED`` * ``REPEATABLE READ`` * ``SERIALIZABLE`` +* ``AUTOCOMMIT`` - on psycopg2 / pg8000 only + +.. 
seealso:: + + :ref:`psycopg2_isolation_level` -The :mod:`~sqlalchemy.dialects.postgresql.psycopg2` dialect also offers the special level ``AUTOCOMMIT``. See -:ref:`psycopg2_isolation_level` for details. + :ref:`pg8000_isolation_level` .. _postgresql_schema_reflection: @@ -85,12 +94,13 @@ :paramref:`.Table.schema` argument, or alternatively the :paramref:`.MetaData.reflect.schema` argument determines which schema will be searched for the table or tables. The reflected :class:`.Table` objects -will in all cases retain this ``.schema`` attribute as was specified. However, -with regards to tables which these :class:`.Table` objects refer to via -foreign key constraint, a decision must be made as to how the ``.schema`` +will in all cases retain this ``.schema`` attribute as was specified. +However, with regards to tables which these :class:`.Table` objects refer to +via foreign key constraint, a decision must be made as to how the ``.schema`` is represented in those remote tables, in the case where that remote schema name is also a member of the current -`Postgresql search path `_. +`Postgresql search path +`_. By default, the Postgresql dialect mimics the behavior encouraged by Postgresql's own ``pg_get_constraintdef()`` builtin procedure. This function @@ -107,7 +117,8 @@ CREATE TABLE test=> SET search_path TO public, test_schema; test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM - test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n + test-> ON n.oid = c.relnamespace test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid test-> WHERE c.relname='referring' AND r.contype = 'f' test-> ; @@ -116,10 +127,11 @@ FOREIGN KEY (referred_id) REFERENCES referred(id) (1 row) -Above, we created a table ``referred`` as a member of the remote schema ``test_schema``, however -when we added ``test_schema`` to the PG ``search_path`` and then asked ``pg_get_constraintdef()`` -for the ``FOREIGN KEY`` syntax, ``test_schema`` was not included in the -output of the function. +Above, we created a table ``referred`` as a member of the remote schema +``test_schema``, however when we added ``test_schema`` to the +PG ``search_path`` and then asked ``pg_get_constraintdef()`` for the +``FOREIGN KEY`` syntax, ``test_schema`` was not included in the output of +the function. On the other hand, if we set the search path back to the typical default of ``public``:: @@ -131,7 +143,8 @@ schema-qualified name for us:: test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM - test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n + test-> ON n.oid = c.relnamespace test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid test-> WHERE c.relname='referring' AND r.contype = 'f'; pg_get_constraintdef @@ -149,7 +162,8 @@ >>> with engine.connect() as conn: ... conn.execute("SET search_path TO test_schema, public") ... meta = MetaData() - ... referring = Table('referring', meta, autoload=True, autoload_with=conn) + ... referring = Table('referring', meta, + ... autoload=True, autoload_with=conn) ... 
@@ -159,16 +173,18 @@ >>> meta.tables['referred'].schema is None True -To alter the behavior of reflection such that the referred schema is maintained -regardless of the ``search_path`` setting, use the ``postgresql_ignore_search_path`` -option, which can be specified as a dialect-specific argument to both -:class:`.Table` as well as :meth:`.MetaData.reflect`:: +To alter the behavior of reflection such that the referred schema is +maintained regardless of the ``search_path`` setting, use the +``postgresql_ignore_search_path`` option, which can be specified as a +dialect-specific argument to both :class:`.Table` as well as +:meth:`.MetaData.reflect`:: >>> with engine.connect() as conn: ... conn.execute("SET search_path TO test_schema, public") ... meta = MetaData() - ... referring = Table('referring', meta, autoload=True, autoload_with=conn, - ... postgresql_ignore_search_path=True) + ... referring = Table('referring', meta, autoload=True, + ... autoload_with=conn, + ... postgresql_ignore_search_path=True) ... @@ -179,29 +195,33 @@ .. sidebar:: Best Practices for Postgresql Schema reflection - The description of Postgresql schema reflection behavior is complex, and is - the product of many years of dealing with widely varied use cases and user preferences. - But in fact, there's no need to understand any of it if you just stick to the simplest - use pattern: leave the ``search_path`` set to its default of ``public`` only, never refer - to the name ``public`` as an explicit schema name otherwise, and - refer to all other schema names explicitly when building - up a :class:`.Table` object. The options described here are only for those users - who can't, or prefer not to, stay within these guidelines. - -Note that **in all cases**, the "default" schema is always reflected as ``None``. -The "default" schema on Postgresql is that which is returned by the -Postgresql ``current_schema()`` function. On a typical Postgresql installation, -this is the name ``public``. So a table that refers to another which is -in the ``public`` (i.e. default) schema will always have the ``.schema`` attribute -set to ``None``. + The description of Postgresql schema reflection behavior is complex, and + is the product of many years of dealing with widely varied use cases and + user preferences. But in fact, there's no need to understand any of it if + you just stick to the simplest use pattern: leave the ``search_path`` set + to its default of ``public`` only, never refer to the name ``public`` as + an explicit schema name otherwise, and refer to all other schema names + explicitly when building up a :class:`.Table` object. The options + described here are only for those users who can't, or prefer not to, stay + within these guidelines. + +Note that **in all cases**, the "default" schema is always reflected as +``None``. The "default" schema on Postgresql is that which is returned by the +Postgresql ``current_schema()`` function. On a typical Postgresql +installation, this is the name ``public``. So a table that refers to another +which is in the ``public`` (i.e. default) schema will always have the +``.schema`` attribute set to ``None``. .. versionadded:: 0.9.2 Added the ``postgresql_ignore_search_path`` - dialect-level option accepted by :class:`.Table` and :meth:`.MetaData.reflect`. + dialect-level option accepted by :class:`.Table` and + :meth:`.MetaData.reflect`. .. seealso:: - `The Schema Search Path `_ - on the Postgresql website. + `The Schema Search Path + `_ + - on the Postgresql website. 
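
As a minimal sketch of the practice recommended above (table and schema
names here are hypothetical): name non-default schemas explicitly on each
:class:`.Table`, and leave the default schema unnamed so that it continues
to reflect as ``None``::

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey

    meta = MetaData()

    # non-default schema is named explicitly
    referred = Table(
        'referred', meta,
        Column('id', Integer, primary_key=True),
        schema='test_schema'
    )

    # default ("public") schema is left unnamed
    referring = Table(
        'referring', meta,
        Column('id', Integer, primary_key=True),
        Column('referred_id', ForeignKey('test_schema.referred.id'))
    )
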
INSERT/UPDATE...RETURNING ------------------------- @@ -244,7 +264,7 @@ The Postgresql text search functions such as ``to_tsquery()`` and ``to_tsvector()`` are available -explicitly using the standard :attr:`.func` construct. For example:: +explicitly using the standard :data:`.func` construct. For example:: select([ func.to_tsvector('fat cats ate rats').match('cat & rat') @@ -264,6 +284,47 @@ SELECT CAST('some text' AS TSVECTOR) AS anon_1 +Full Text Searches in Postgresql are influenced by a combination of: the +PostgresSQL setting of ``default_text_search_config``, the ``regconfig`` used +to build the GIN/GiST indexes, and the ``regconfig`` optionally passed in +during a query. + +When performing a Full Text Search against a column that has a GIN or +GiST index that is already pre-computed (which is common on full text +searches) one may need to explicitly pass in a particular PostgresSQL +``regconfig`` value to ensure the query-planner utilizes the index and does +not re-compute the column on demand. + +In order to provide for this explicit query planning, or to use different +search strategies, the ``match`` method accepts a ``postgresql_regconfig`` +keyword argument:: + + select([mytable.c.id]).where( + mytable.c.title.match('somestring', postgresql_regconfig='english') + ) + +Emits the equivalent of:: + + SELECT mytable.id FROM mytable + WHERE mytable.title @@ to_tsquery('english', 'somestring') + +One can also specifically pass in a `'regconfig'` value to the +``to_tsvector()`` command as the initial argument:: + + select([mytable.c.id]).where( + func.to_tsvector('english', mytable.c.title )\ + .match('somestring', postgresql_regconfig='english') + ) + +produces a statement equivalent to:: + + SELECT mytable.id FROM mytable + WHERE to_tsvector('english', mytable.title) @@ + to_tsquery('english', 'somestring') + +It is recommended that you use the ``EXPLAIN ANALYZE...`` tool from +PostgresSQL to ensure that you are generating queries with SQLAlchemy that +take full advantage of any indexes you may have created for full text search. FROM ONLY ... ------------------------ @@ -338,13 +399,179 @@ underlying CREATE INDEX command, so it *must* be a valid index type for your version of PostgreSQL. +.. _postgresql_index_storage: + +Index Storage Parameters +^^^^^^^^^^^^^^^^^^^^^^^^ + +PostgreSQL allows storage parameters to be set on indexes. The storage +parameters available depend on the index method used by the index. Storage +parameters can be specified on :class:`.Index` using the ``postgresql_with`` +keyword argument:: + + Index('my_index', my_table.c.data, postgresql_with={"fillfactor": 50}) + +.. versionadded:: 1.0.6 + +.. _postgresql_index_concurrently: + +Indexes with CONCURRENTLY +^^^^^^^^^^^^^^^^^^^^^^^^^ + +The Postgresql index option CONCURRENTLY is supported by passing the +flag ``postgresql_concurrently`` to the :class:`.Index` construct:: + + tbl = Table('testtbl', m, Column('data', Integer)) + + idx1 = Index('test_idx1', tbl.c.data, postgresql_concurrently=True) + +The above index construct will render SQL as:: + + CREATE INDEX CONCURRENTLY test_idx1 ON testtbl (data) + +.. versionadded:: 0.9.9 + +.. _postgresql_index_reflection: + +Postgresql Index Reflection +--------------------------- + +The Postgresql database creates a UNIQUE INDEX implicitly whenever the +UNIQUE CONSTRAINT construct is used. 
When inspecting a table using +:class:`.Inspector`, the :meth:`.Inspector.get_indexes` +and the :meth:`.Inspector.get_unique_constraints` will report on these +two constructs distinctly; in the case of the index, the key +``duplicates_constraint`` will be present in the index entry if it is +detected as mirroring a constraint. When performing reflection using +``Table(..., autoload=True)``, the UNIQUE INDEX is **not** returned +in :attr:`.Table.indexes` when it is detected as mirroring a +:class:`.UniqueConstraint` in the :attr:`.Table.constraints` collection. + +.. versionchanged:: 1.0.0 - :class:`.Table` reflection now includes + :class:`.UniqueConstraint` objects present in the :attr:`.Table.constraints` + collection; the Postgresql backend will no longer include a "mirrored" + :class:`.Index` construct in :attr:`.Table.indexes` if it is detected + as corresponding to a unique constraint. + +Special Reflection Options +-------------------------- + +The :class:`.Inspector` used for the Postgresql backend is an instance +of :class:`.PGInspector`, which offers additional methods:: + + from sqlalchemy import create_engine, inspect + + engine = create_engine("postgresql+psycopg2://localhost/test") + insp = inspect(engine) # will be a PGInspector + + print(insp.get_enums()) + +.. autoclass:: PGInspector + :members: + +.. _postgresql_table_options: + +PostgreSQL Table Options +------------------------- + +Several options for CREATE TABLE are supported directly by the PostgreSQL +dialect in conjunction with the :class:`.Table` construct: + +* ``TABLESPACE``:: + + Table("some_table", metadata, ..., postgresql_tablespace='some_tablespace') + +* ``ON COMMIT``:: + + Table("some_table", metadata, ..., postgresql_on_commit='PRESERVE ROWS') + +* ``WITH OIDS``:: + + Table("some_table", metadata, ..., postgresql_with_oids=True) + +* ``WITHOUT OIDS``:: + + Table("some_table", metadata, ..., postgresql_with_oids=False) + +* ``INHERITS``:: + + Table("some_table", metadata, ..., postgresql_inherits="some_supertable") + + Table("some_table", metadata, ..., postgresql_inherits=("t1", "t2", ...)) + +.. versionadded:: 1.0.0 + +.. seealso:: + + `Postgresql CREATE TABLE options + `_ + +ENUM Types +---------- + +Postgresql has an independently creatable TYPE structure which is used +to implement an enumerated type. This approach introduces significant +complexity on the SQLAlchemy side in terms of when this type should be +CREATED and DROPPED. The type object is also an independently reflectable +entity. The following sections should be consulted: + +* :class:`.postgresql.ENUM` - DDL and typing support for ENUM. + +* :meth:`.PGInspector.get_enums` - retrieve a listing of current ENUM types + +* :meth:`.postgresql.ENUM.create` , :meth:`.postgresql.ENUM.drop` - individual + CREATE and DROP commands for ENUM. + +.. _postgresql_array_of_enum: + +Using ENUM with ARRAY +^^^^^^^^^^^^^^^^^^^^^ + +The combination of ENUM and ARRAY is not directly supported by backend +DBAPIs at this time. 
In order to send and receive an ARRAY of ENUM, +use the following workaround type:: + + class ArrayOfEnum(ARRAY): + + def bind_expression(self, bindvalue): + return sa.cast(bindvalue, self) + + def result_processor(self, dialect, coltype): + super_rp = super(ArrayOfEnum, self).result_processor( + dialect, coltype) + + def handle_raw_string(value): + inner = re.match(r"^{(.*)}$", value).group(1) + return inner.split(",") if inner else [] + + def process(value): + if value is None: + return None + return super_rp(handle_raw_string(value)) + return process + +E.g.:: + + Table( + 'mydata', metadata, + Column('id', Integer, primary_key=True), + Column('data', ArrayOfEnum(ENUM('a', 'b, 'c', name='myenum'))) + + ) + +This type is not included as a built-in type as it would be incompatible +with a DBAPI that suddenly decides to support ARRAY of ENUM directly in +a new version. + """ from collections import defaultdict import re +import datetime as dt + from ... import sql, schema, exc, util from ...engine import default, reflection -from ...sql import compiler, expression, operators +from ...sql import compiler, expression, operators, default_comparator from ... import types as sqltypes try: @@ -353,26 +580,26 @@ _python_UUID = None from sqlalchemy.types import INTEGER, BIGINT, SMALLINT, VARCHAR, \ - CHAR, TEXT, FLOAT, NUMERIC, \ - DATE, BOOLEAN, REAL + CHAR, TEXT, FLOAT, NUMERIC, \ + DATE, BOOLEAN, REAL RESERVED_WORDS = set( ["all", "analyse", "analyze", "and", "any", "array", "as", "asc", - "asymmetric", "both", "case", "cast", "check", "collate", "column", - "constraint", "create", "current_catalog", "current_date", - "current_role", "current_time", "current_timestamp", "current_user", - "default", "deferrable", "desc", "distinct", "do", "else", "end", - "except", "false", "fetch", "for", "foreign", "from", "grant", "group", - "having", "in", "initially", "intersect", "into", "leading", "limit", - "localtime", "localtimestamp", "new", "not", "null", "of", "off", "offset", - "old", "on", "only", "or", "order", "placing", "primary", "references", - "returning", "select", "session_user", "some", "symmetric", "table", - "then", "to", "trailing", "true", "union", "unique", "user", "using", - "variadic", "when", "where", "window", "with", "authorization", - "between", "binary", "cross", "current_schema", "freeze", "full", - "ilike", "inner", "is", "isnull", "join", "left", "like", "natural", - "notnull", "outer", "over", "overlaps", "right", "similar", "verbose" - ]) + "asymmetric", "both", "case", "cast", "check", "collate", "column", + "constraint", "create", "current_catalog", "current_date", + "current_role", "current_time", "current_timestamp", "current_user", + "default", "deferrable", "desc", "distinct", "do", "else", "end", + "except", "false", "fetch", "for", "foreign", "from", "grant", "group", + "having", "in", "initially", "intersect", "into", "leading", "limit", + "localtime", "localtimestamp", "new", "not", "null", "of", "off", + "offset", "old", "on", "only", "or", "order", "placing", "primary", + "references", "returning", "select", "session_user", "some", "symmetric", + "table", "then", "to", "trailing", "true", "union", "unique", "user", + "using", "variadic", "when", "where", "window", "with", "authorization", + "between", "binary", "cross", "current_schema", "freeze", "full", + "ilike", "inner", "is", "isnull", "join", "left", "like", "natural", + "notnull", "outer", "over", "overlaps", "right", "similar", "verbose" + ]) _DECIMAL_TYPES = (1231, 1700) _FLOAT_TYPES = (700, 701, 
1021, 1022) @@ -402,19 +629,32 @@ class MACADDR(sqltypes.TypeEngine): PGMacAddr = MACADDR +class OID(sqltypes.TypeEngine): + + """Provide the Postgresql OID type. + + .. versionadded:: 0.9.5 + + """ + __visit_name__ = "OID" + + class TIMESTAMP(sqltypes.TIMESTAMP): + def __init__(self, timezone=False, precision=None): super(TIMESTAMP, self).__init__(timezone=timezone) self.precision = precision class TIME(sqltypes.TIME): + def __init__(self, timezone=False, precision=None): super(TIME, self).__init__(timezone=timezone) self.precision = precision class INTERVAL(sqltypes.TypeEngine): + """Postgresql INTERVAL type. The INTERVAL type may not be supported on all DBAPIs. @@ -434,6 +674,10 @@ def _adapt_from_generic_interval(cls, interval): def _type_affinity(self): return sqltypes.Interval + @property + def python_type(self): + return dt.timedelta + PGInterval = INTERVAL @@ -453,6 +697,7 @@ def __init__(self, length=None, varying=False): class UUID(sqltypes.TypeEngine): + """Postgresql UUID type. Represents the UUID column type, interpreting @@ -476,7 +721,8 @@ def __init__(self, as_uuid=False): """ if as_uuid and _python_UUID is None: raise NotImplementedError( - "This version of Python does not support the native UUID type." + "This version of Python does not support " + "the native UUID type." ) self.as_uuid = as_uuid @@ -502,7 +748,9 @@ def process(value): PGUuid = UUID + class TSVECTOR(sqltypes.TypeEngine): + """The :class:`.postgresql.TSVECTOR` type implements the Postgresql text search type TSVECTOR. @@ -519,21 +767,21 @@ class TSVECTOR(sqltypes.TypeEngine): __visit_name__ = 'TSVECTOR' - class _Slice(expression.ColumnElement): __visit_name__ = 'slice' type = sqltypes.NULLTYPE def __init__(self, slice_, source_comparator): - self.start = source_comparator._check_literal( - source_comparator.expr, - operators.getitem, slice_.start) - self.stop = source_comparator._check_literal( - source_comparator.expr, - operators.getitem, slice_.stop) + self.start = default_comparator._check_literal( + source_comparator.expr, + operators.getitem, slice_.start) + self.stop = default_comparator._check_literal( + source_comparator.expr, + operators.getitem, slice_.stop) class Any(expression.ColumnElement): + """Represent the clause ``left operator ANY (right)``. ``right`` must be an array expression. @@ -554,6 +802,7 @@ def __init__(self, left, right, operator=operators.eq): class All(expression.ColumnElement): + """Represent the clause ``left operator ALL (right)``. ``right`` must be an array expression. @@ -574,6 +823,7 @@ def __init__(self, left, right, operator=operators.eq): class array(expression.Tuple): + """A Postgresql ARRAY literal. This is used to produce ARRAY literals in SQL expressions, e.g.:: @@ -613,9 +863,9 @@ def __init__(self, clauses, **kw): self.type = ARRAY(self.type) def _bind_param(self, operator, obj): - return array(*[ + return array([ expression.BindParameter(None, o, _compared_to_operator=operator, - _compared_to_type=self.type, unique=True) + _compared_to_type=self.type, unique=True) for o in obj ]) @@ -624,6 +874,7 @@ def self_group(self, against=None): class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): + """Postgresql ARRAY type. Represents values as Python lists. @@ -677,6 +928,16 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): mytable.c.data[2:7]: [1, 2, 3] }) + .. note:: + + Multi-dimensional support for the ``[]`` operator is not supported + in SQLAlchemy 1.0. 
Please use the :func:`.type_coerce` function + to cast an intermediary expression to ARRAY again as a workaround:: + + expr = type_coerce(my_array_column[5], ARRAY(Integer))[6] + + Multi-dimensional support will be provided in a future release. + :class:`.ARRAY` provides special methods for containment operations, e.g.:: @@ -691,6 +952,10 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): The :class:`.ARRAY` type may not be supported on all DBAPIs. It is known to work on psycopg2 and not pg8000. + Additionally, the :class:`.ARRAY` type does not work directly in + conjunction with the :class:`.ENUM` type. For a workaround, see the + special type at :ref:`postgresql_array_of_enum`. + See also: :class:`.postgresql.array` - produce a literal array value. @@ -699,16 +964,27 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): __visit_name__ = 'ARRAY' class Comparator(sqltypes.Concatenable.Comparator): + """Define comparison operations for :class:`.ARRAY`.""" def __getitem__(self, index): + shift_indexes = 1 if self.expr.type.zero_indexes else 0 if isinstance(index, slice): + if shift_indexes: + index = slice( + index.start + shift_indexes, + index.stop + shift_indexes, + index.step + ) index = _Slice(index, self) return_type = self.type else: + index += shift_indexes return_type = self.type.item_type - return self._binary_operate(self.expr, operators.getitem, index, - result_type=return_type) + + return default_comparator._binary_operate( + self.expr, operators.getitem, index, + result_type=return_type) def any(self, other, operator=operators.eq): """Return ``other operator ANY (array)`` clause. @@ -797,7 +1073,8 @@ def _adapt_expression(self, op, other_comparator): comparator_factory = Comparator - def __init__(self, item_type, as_tuple=False, dimensions=None): + def __init__(self, item_type, as_tuple=False, dimensions=None, + zero_indexes=False): """Construct an ARRAY. E.g.:: @@ -824,15 +1101,23 @@ def __init__(self, item_type, as_tuple=False, dimensions=None): meaning they can store any number of dimensions no matter how they were declared. + :param zero_indexes=False: when True, index values will be converted + between Python zero-based and Postgresql one-based indexes, e.g. + a value of one will be added to all index values before passing + to the database. + + .. versionadded:: 0.9.5 + """ if isinstance(item_type, ARRAY): raise ValueError("Do not nest ARRAY types; ARRAY(basetype) " - "handles multi-dimensional arrays of basetype") + "handles multi-dimensional arrays of basetype") if isinstance(item_type, type): item_type = item_type() self.item_type = item_type self.as_tuple = as_tuple self.dimensions = dimensions + self.zero_indexes = zero_indexes @property def python_type(self): @@ -845,77 +1130,133 @@ def _proc_array(self, arr, itemproc, dim, collection): if dim is None: arr = list(arr) if dim == 1 or dim is None and ( - # this has to be (list, tuple), or at least - # not hasattr('__iter__'), since Py3K strings - # etc. have __iter__ - not arr or not isinstance(arr[0], (list, tuple))): + # this has to be (list, tuple), or at least + # not hasattr('__iter__'), since Py3K strings + # etc. 
have __iter__ + not arr or not isinstance(arr[0], (list, tuple))): if itemproc: return collection(itemproc(x) for x in arr) else: return collection(arr) else: return collection( - self._proc_array( - x, itemproc, - dim - 1 if dim is not None else None, - collection) - for x in arr - ) + self._proc_array( + x, itemproc, + dim - 1 if dim is not None else None, + collection) + for x in arr + ) def bind_processor(self, dialect): item_proc = self.item_type.\ - dialect_impl(dialect).\ - bind_processor(dialect) + dialect_impl(dialect).\ + bind_processor(dialect) def process(value): if value is None: return value else: return self._proc_array( - value, - item_proc, - self.dimensions, - list) + value, + item_proc, + self.dimensions, + list) return process def result_processor(self, dialect, coltype): item_proc = self.item_type.\ - dialect_impl(dialect).\ - result_processor(dialect, coltype) + dialect_impl(dialect).\ + result_processor(dialect, coltype) def process(value): if value is None: return value else: return self._proc_array( - value, - item_proc, - self.dimensions, - tuple if self.as_tuple else list) + value, + item_proc, + self.dimensions, + tuple if self.as_tuple else list) return process PGArray = ARRAY class ENUM(sqltypes.Enum): + """Postgresql ENUM type. This is a subclass of :class:`.types.Enum` which includes - support for PG's ``CREATE TYPE``. - - :class:`~.postgresql.ENUM` is used automatically when - using the :class:`.types.Enum` type on PG assuming - the ``native_enum`` is left as ``True``. However, the - :class:`~.postgresql.ENUM` class can also be instantiated - directly in order to access some additional Postgresql-specific - options, namely finer control over whether or not - ``CREATE TYPE`` should be emitted. - - Note that both :class:`.types.Enum` as well as - :class:`~.postgresql.ENUM` feature create/drop - methods; the base :class:`.types.Enum` type ultimately - delegates to the :meth:`~.postgresql.ENUM.create` and - :meth:`~.postgresql.ENUM.drop` methods present here. + support for PG's ``CREATE TYPE`` and ``DROP TYPE``. + + When the builtin type :class:`.types.Enum` is used and the + :paramref:`.Enum.native_enum` flag is left at its default of + True, the Postgresql backend will use a :class:`.postgresql.ENUM` + type as the implementation, so the special create/drop rules + will be used. + + The create/drop behavior of ENUM is necessarily intricate, due to the + awkward relationship the ENUM type has in relationship to the + parent table, in that it may be "owned" by just a single table, or + may be shared among many tables. 
+ + When using :class:`.types.Enum` or :class:`.postgresql.ENUM` + in an "inline" fashion, the ``CREATE TYPE`` and ``DROP TYPE`` is emitted + corresponding to when the :meth:`.Table.create` and :meth:`.Table.drop` + methods are called:: + + table = Table('sometable', metadata, + Column('some_enum', ENUM('a', 'b', 'c', name='myenum')) + ) + + table.create(engine) # will emit CREATE ENUM and CREATE TABLE + table.drop(engine) # will emit DROP TABLE and DROP ENUM + + To use a common enumerated type between multiple tables, the best + practice is to declare the :class:`.types.Enum` or + :class:`.postgresql.ENUM` independently, and associate it with the + :class:`.MetaData` object itself:: + + my_enum = ENUM('a', 'b', 'c', name='myenum', metadata=metadata) + + t1 = Table('sometable_one', metadata, + Column('some_enum', myenum) + ) + + t2 = Table('sometable_two', metadata, + Column('some_enum', myenum) + ) + + When this pattern is used, care must still be taken at the level + of individual table creates. Emitting CREATE TABLE without also + specifying ``checkfirst=True`` will still cause issues:: + + t1.create(engine) # will fail: no such type 'myenum' + + If we specify ``checkfirst=True``, the individual table-level create + operation will check for the ``ENUM`` and create if not exists:: + + # will check if enum exists, and emit CREATE TYPE if not + t1.create(engine, checkfirst=True) + + When using a metadata-level ENUM type, the type will always be created + and dropped if either the metadata-wide create/drop is called:: + + metadata.create_all(engine) # will emit CREATE TYPE + metadata.drop_all(engine) # will emit DROP TYPE + + The type can also be created and dropped directly:: + + my_enum.create(engine) + my_enum.drop(engine) + + .. versionchanged:: 1.0.0 The Postgresql :class:`.postgresql.ENUM` type + now behaves more strictly with regards to CREATE/DROP. A metadata-level + ENUM type will only be created and dropped at the metadata level, + not the table level, with the exception of + ``table.create(checkfirst=True)``. + The ``table.drop()`` call will now emit a DROP TYPE for a table-level + enumerated type. 
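
    Where the DDL for the enumerated type is managed entirely outside of
    SQLAlchemy (for example by migration scripts), emission of
    ``CREATE TYPE`` / ``DROP TYPE`` can be suppressed with the
    ``create_type=False`` flag; a brief sketch, assuming the type already
    exists in the target database::

        t3 = Table('sometable_three', metadata,
            Column('some_enum', ENUM('a', 'b', 'c', name='myenum',
                                     create_type=False))
        )

        t3.create(engine)  # emits CREATE TABLE only; 'myenum' must exist
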
""" @@ -971,7 +1312,8 @@ def create(self, bind=None, checkfirst=True): return if not checkfirst or \ - not bind.dialect.has_type(bind, self.name, schema=self.schema): + not bind.dialect.has_type( + bind, self.name, schema=self.schema): bind.execute(CreateEnumType(self)) def drop(self, bind=None, checkfirst=True): @@ -993,7 +1335,7 @@ def drop(self, bind=None, checkfirst=True): return if not checkfirst or \ - bind.dialect.has_type(bind, self.name, schema=self.schema): + bind.dialect.has_type(bind, self.name, schema=self.schema): bind.execute(DropEnumType(self)) def _check_for_name_in_memos(self, checkfirst, kw): @@ -1020,12 +1362,20 @@ def _check_for_name_in_memos(self, checkfirst, kw): return False def _on_table_create(self, target, bind, checkfirst, **kw): - if not self._check_for_name_in_memos(checkfirst, kw): + if checkfirst or ( + not self.metadata and + not kw.get('_is_metadata_operation', False)) and \ + not self._check_for_name_in_memos(checkfirst, kw): self.create(bind=bind, checkfirst=checkfirst) + def _on_table_drop(self, target, bind, checkfirst, **kw): + if not self.metadata and \ + not kw.get('_is_metadata_operation', False) and \ + not self._check_for_name_in_memos(checkfirst, kw): + self.drop(bind=bind, checkfirst=checkfirst) + def _on_metadata_create(self, target, bind, checkfirst, **kw): - if self.metadata is not None and \ - not self._check_for_name_in_memos(checkfirst, kw): + if not self._check_for_name_in_memos(checkfirst, kw): self.create(bind=bind, checkfirst=checkfirst) def _on_metadata_drop(self, target, bind, checkfirst, **kw): @@ -1055,6 +1405,7 @@ def _on_metadata_drop(self, target, bind, checkfirst, **kw): 'bit': BIT, 'bit varying': BIT, 'macaddr': MACADDR, + 'oid': OID, 'double precision': DOUBLE_PRECISION, 'timestamp': TIMESTAMP, 'timestamp with time zone': TIMESTAMP, @@ -1068,7 +1419,7 @@ def _on_metadata_drop(self, target, bind, checkfirst, **kw): 'interval': INTERVAL, 'interval year to month': INTERVAL, 'interval day to second': INTERVAL, - 'tsvector' : TSVECTOR + 'tsvector': TSVECTOR } @@ -1079,9 +1430,9 @@ def visit_array(self, element, **kw): def visit_slice(self, element, **kw): return "%s:%s" % ( - self.process(element.start, **kw), - self.process(element.stop, **kw), - ) + self.process(element.start, **kw), + self.process(element.stop, **kw), + ) def visit_any(self, element, **kw): return "%s%sANY (%s)" % ( @@ -1099,21 +1450,32 @@ def visit_all(self, element, **kw): def visit_getitem_binary(self, binary, operator, **kw): return "%s[%s]" % ( - self.process(binary.left, **kw), - self.process(binary.right, **kw) - ) + self.process(binary.left, **kw), + self.process(binary.right, **kw) + ) def visit_match_op_binary(self, binary, operator, **kw): + if "postgresql_regconfig" in binary.modifiers: + regconfig = self.render_literal_value( + binary.modifiers['postgresql_regconfig'], + sqltypes.STRINGTYPE) + if regconfig: + return "%s @@ to_tsquery(%s, %s)" % ( + self.process(binary.left, **kw), + regconfig, + self.process(binary.right, **kw) + ) return "%s @@ to_tsquery(%s)" % ( - self.process(binary.left, **kw), - self.process(binary.right, **kw)) + self.process(binary.left, **kw), + self.process(binary.right, **kw) + ) def visit_ilike_op_binary(self, binary, operator, **kw): escape = binary.modifiers.get("escape", None) return '%s ILIKE %s' % \ - (self.process(binary.left, **kw), - self.process(binary.right, **kw)) \ + (self.process(binary.left, **kw), + self.process(binary.right, **kw)) \ + ( ' ESCAPE ' + self.render_literal_value(escape, 
sqltypes.STRINGTYPE) @@ -1123,8 +1485,8 @@ def visit_ilike_op_binary(self, binary, operator, **kw): def visit_notilike_op_binary(self, binary, operator, **kw): escape = binary.modifiers.get("escape", None) return '%s NOT ILIKE %s' % \ - (self.process(binary.left, **kw), - self.process(binary.right, **kw)) \ + (self.process(binary.left, **kw), + self.process(binary.right, **kw)) \ + ( ' ESCAPE ' + self.render_literal_value(escape, sqltypes.STRINGTYPE) @@ -1141,14 +1503,14 @@ def render_literal_value(self, value, type_): def visit_sequence(self, seq): return "nextval('%s')" % self.preparer.format_sequence(seq) - def limit_clause(self, select): + def limit_clause(self, select, **kw): text = "" - if select._limit is not None: - text += " \n LIMIT " + self.process(sql.literal(select._limit)) - if select._offset is not None: - if select._limit is None: + if select._limit_clause is not None: + text += " \n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: text += " \n LIMIT ALL" - text += " OFFSET " + self.process(sql.literal(select._offset)) + text += " OFFSET " + self.process(select._offset_clause, **kw) return text def format_from_hint_text(self, sqltext, table, hint, iscrud): @@ -1156,7 +1518,7 @@ def format_from_hint_text(self, sqltext, table, hint, iscrud): raise exc.CompileError("Unrecognized hint: %r" % hint) return "ONLY " + sqltext - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): if select._distinct is not False: if select._distinct is True: return "DISTINCT " @@ -1165,11 +1527,12 @@ def get_select_precolumns(self, select): [self.process(col) for col in select._distinct] ) + ") " else: - return "DISTINCT ON (" + self.process(select._distinct) + ") " + return "DISTINCT ON (" + \ + self.process(select._distinct, **kw) + ") " else: return "" - def for_update_clause(self, select): + def for_update_clause(self, select, **kw): if select._for_update_arg.read: tmp = " FOR SHARE" @@ -1178,12 +1541,12 @@ def for_update_clause(self, select): if select._for_update_arg.of: tables = util.OrderedSet( - c.table if isinstance(c, expression.ColumnClause) - else c for c in select._for_update_arg.of) + c.table if isinstance(c, expression.ColumnClause) + else c for c in select._for_update_arg.of) tmp += " OF " + ", ".join( - self.process(table, ashint=True) - for table in tables - ) + self.process(table, ashint=True, use_schema=False, **kw) + for table in tables + ) if select._for_update_arg.nowait: tmp += " NOWAIT" @@ -1193,13 +1556,12 @@ def for_update_clause(self, select): def returning_clause(self, stmt, returning_cols): columns = [ - self._label_select_column(None, c, True, False, {}) - for c in expression._select_iterables(returning_cols) - ] + self._label_select_column(None, c, True, False, {}) + for c in expression._select_iterables(returning_cols) + ] return 'RETURNING ' + ', '.join(columns) - def visit_substring_func(self, func, **kw): s = self.process(func.clauses.clauses[0], **kw) start = self.process(func.clauses.clauses[1], **kw) @@ -1209,7 +1571,9 @@ def visit_substring_func(self, func, **kw): else: return "SUBSTRING(%s FROM %s)" % (s, start) + class PGDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): colspec = self.preparer.format_column(column) @@ -1232,7 +1596,8 @@ def get_column_specification(self, column, **kwargs): else: colspec += " SERIAL" else: - colspec += " " + self.dialect.type_compiler.process(column.type) + colspec += " " 
+ self.dialect.type_compiler.process(column.type, + type_expression=column) default = self.get_column_default_string(column) if default is not None: colspec += " DEFAULT " + default @@ -1248,7 +1613,7 @@ def visit_create_enum_type(self, create): self.preparer.format_type(type_), ", ".join( self.sql_compiler.process(sql.literal(e), literal_binds=True) - for e in type_.enums) + for e in type_.enums) ) def visit_drop_enum_type(self, drop): @@ -1265,11 +1630,17 @@ def visit_create_index(self, create): text = "CREATE " if index.unique: text += "UNIQUE " - text += "INDEX %s ON %s " % ( - self._prepared_index_name(index, - include_schema=False), - preparer.format_table(index.table) - ) + text += "INDEX " + + concurrently = index.dialect_options['postgresql']['concurrently'] + if concurrently: + text += "CONCURRENTLY " + + text += "%s ON %s " % ( + self._prepared_index_name(index, + include_schema=False), + preparer.format_table(index.table) + ) using = index.dialect_options['postgresql']['using'] if using: @@ -1280,123 +1651,174 @@ def visit_create_index(self, create): % ( ', '.join([ self.sql_compiler.process( - expr.self_group() - if not isinstance(expr, expression.ColumnClause) - else expr, - include_table=False, literal_binds=True) + - (c.key in ops and (' ' + ops[c.key]) or '') - for expr, c in zip(index.expressions, index.columns)]) - ) + expr.self_group() + if not isinstance(expr, expression.ColumnClause) + else expr, + include_table=False, literal_binds=True) + + ( + (' ' + ops[expr.key]) + if hasattr(expr, 'key') + and expr.key in ops else '' + ) + for expr in index.expressions + ]) + ) + + withclause = index.dialect_options['postgresql']['with'] + + if withclause: + text += " WITH (%s)" % (', '.join( + ['%s = %s' % storage_parameter + for storage_parameter in withclause.items()])) whereclause = index.dialect_options["postgresql"]["where"] if whereclause is not None: where_compiled = self.sql_compiler.process( - whereclause, include_table=False, - literal_binds=True) + whereclause, include_table=False, + literal_binds=True) text += " WHERE " + where_compiled return text - def visit_exclude_constraint(self, constraint): + def visit_exclude_constraint(self, constraint, **kw): text = "" if constraint.name is not None: text += "CONSTRAINT %s " % \ self.preparer.format_constraint(constraint) elements = [] - for c in constraint.columns: - op = constraint.operators[c.name] - elements.append(self.preparer.quote(c.name) + ' WITH '+op) - text += "EXCLUDE USING %s (%s)" % (constraint.using, ', '.join(elements)) + for expr, name, op in constraint._render_exprs: + kw['include_table'] = False + elements.append( + "%s WITH %s" % (self.sql_compiler.process(expr, **kw), op) + ) + text += "EXCLUDE USING %s (%s)" % (constraint.using, + ', '.join(elements)) if constraint.where is not None: text += ' WHERE (%s)' % self.sql_compiler.process( - constraint.where, - literal_binds=True) + constraint.where, + literal_binds=True) text += self.define_constraint_deferrability(constraint) return text + def post_create_table(self, table): + table_opts = [] + pg_opts = table.dialect_options['postgresql'] + + inherits = pg_opts.get('inherits') + if inherits is not None: + if not isinstance(inherits, (list, tuple)): + inherits = (inherits, ) + table_opts.append( + '\n INHERITS ( ' + + ', '.join(self.preparer.quote(name) for name in inherits) + + ' )') + + if pg_opts['with_oids'] is True: + table_opts.append('\n WITH OIDS') + elif pg_opts['with_oids'] is False: + table_opts.append('\n WITHOUT OIDS') + + if 
pg_opts['on_commit']: + on_commit_options = pg_opts['on_commit'].replace("_", " ").upper() + table_opts.append('\n ON COMMIT %s' % on_commit_options) + + if pg_opts['tablespace']: + tablespace_name = pg_opts['tablespace'] + table_opts.append( + '\n TABLESPACE %s' % self.preparer.quote(tablespace_name) + ) + + return ''.join(table_opts) + class PGTypeCompiler(compiler.GenericTypeCompiler): - def visit_TSVECTOR(self, type): + def visit_TSVECTOR(self, type, **kw): return "TSVECTOR" - def visit_INET(self, type_): + def visit_INET(self, type_, **kw): return "INET" - def visit_CIDR(self, type_): + def visit_CIDR(self, type_, **kw): return "CIDR" - def visit_MACADDR(self, type_): + def visit_MACADDR(self, type_, **kw): return "MACADDR" - def visit_FLOAT(self, type_): + def visit_OID(self, type_, **kw): + return "OID" + + def visit_FLOAT(self, type_, **kw): if not type_.precision: return "FLOAT" else: return "FLOAT(%(precision)s)" % {'precision': type_.precision} - def visit_DOUBLE_PRECISION(self, type_): + def visit_DOUBLE_PRECISION(self, type_, **kw): return "DOUBLE PRECISION" - def visit_BIGINT(self, type_): + def visit_BIGINT(self, type_, **kw): return "BIGINT" - def visit_HSTORE(self, type_): + def visit_HSTORE(self, type_, **kw): return "HSTORE" - def visit_JSON(self, type_): + def visit_JSON(self, type_, **kw): return "JSON" - def visit_INT4RANGE(self, type_): + def visit_JSONB(self, type_, **kw): + return "JSONB" + + def visit_INT4RANGE(self, type_, **kw): return "INT4RANGE" - def visit_INT8RANGE(self, type_): + def visit_INT8RANGE(self, type_, **kw): return "INT8RANGE" - def visit_NUMRANGE(self, type_): + def visit_NUMRANGE(self, type_, **kw): return "NUMRANGE" - def visit_DATERANGE(self, type_): + def visit_DATERANGE(self, type_, **kw): return "DATERANGE" - def visit_TSRANGE(self, type_): + def visit_TSRANGE(self, type_, **kw): return "TSRANGE" - def visit_TSTZRANGE(self, type_): + def visit_TSTZRANGE(self, type_, **kw): return "TSTZRANGE" - def visit_datetime(self, type_): - return self.visit_TIMESTAMP(type_) + def visit_datetime(self, type_, **kw): + return self.visit_TIMESTAMP(type_, **kw) - def visit_enum(self, type_): + def visit_enum(self, type_, **kw): if not type_.native_enum or not self.dialect.supports_native_enum: - return super(PGTypeCompiler, self).visit_enum(type_) + return super(PGTypeCompiler, self).visit_enum(type_, **kw) else: - return self.visit_ENUM(type_) + return self.visit_ENUM(type_, **kw) - def visit_ENUM(self, type_): + def visit_ENUM(self, type_, **kw): return self.dialect.identifier_preparer.format_type(type_) - def visit_TIMESTAMP(self, type_): + def visit_TIMESTAMP(self, type_, **kw): return "TIMESTAMP%s %s" % ( getattr(type_, 'precision', None) and "(%d)" % type_.precision or "", (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE" ) - def visit_TIME(self, type_): + def visit_TIME(self, type_, **kw): return "TIME%s %s" % ( getattr(type_, 'precision', None) and "(%d)" % type_.precision or "", (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE" ) - def visit_INTERVAL(self, type_): + def visit_INTERVAL(self, type_, **kw): if type_.precision is not None: return "INTERVAL(%d)" % type_.precision else: return "INTERVAL" - def visit_BIT(self, type_): + def visit_BIT(self, type_, **kw): if type_.varying: compiled = "BIT VARYING" if type_.length is not None: @@ -1405,19 +1827,19 @@ def visit_BIT(self, type_): compiled = "BIT(%d)" % type_.length return compiled - def visit_UUID(self, type_): + def visit_UUID(self, type_, **kw): return "UUID" - def 
visit_large_binary(self, type_):
-        return self.visit_BYTEA(type_)
+    def visit_large_binary(self, type_, **kw):
+        return self.visit_BYTEA(type_, **kw)

-    def visit_BYTEA(self, type_):
+    def visit_BYTEA(self, type_, **kw):
         return "BYTEA"

-    def visit_ARRAY(self, type_):
+    def visit_ARRAY(self, type_, **kw):
         return self.process(type_.item_type) + ('[]' * (type_.dimensions
-                                                        if type_.dimensions
-                                                        is not None else 1))
+                                                if type_.dimensions
+                                                is not None else 1))


 class PGIdentifierPreparer(compiler.IdentifierPreparer):
@@ -1427,7 +1849,7 @@ class PGIdentifierPreparer(compiler.IdentifierPreparer):
     def _unquote_identifier(self, value):
         if value[0] == self.initial_quote:
             value = value[1:-1].\
-                        replace(self.escape_to_quote, self.escape_quote)
+                replace(self.escape_to_quote, self.escape_quote)
         return value

     def format_type(self, type_, use_schema=True):
@@ -1446,11 +1868,45 @@ def __init__(self, conn):
         reflection.Inspector.__init__(self, conn)

     def get_table_oid(self, table_name, schema=None):
-        """Return the oid from `table_name` and `schema`."""
+        """Return the OID for the given table name."""
         return self.dialect.get_table_oid(self.bind, table_name, schema,
                                           info_cache=self.info_cache)

+    def get_enums(self, schema=None):
+        """Return a list of ENUM objects.
+
+        Each member is a dictionary containing these fields:
+
+        * name - name of the enum
+        * schema - the schema name for the enum.
+        * visible - boolean, whether or not this enum is visible
+          in the default search path.
+        * labels - a list of string labels that apply to the enum.
+
+        :param schema: schema name.  If None, the default schema
+         (typically 'public') is used.  May also be set to '*' to
+         indicate load enums for all schemas.
+
+        .. versionadded:: 1.0.0
+
+        """
+        schema = schema or self.default_schema_name
+        return self.dialect._load_enums(self.bind, schema)
+
+    def get_foreign_table_names(self, schema=None):
+        """Return a list of FOREIGN TABLE names.
+
+        Behavior is similar to that of :meth:`.Inspector.get_table_names`,
+        except that the list is limited to those tables that report a
+        ``relkind`` value of ``f``.
+
+        .. versionadded:: 1.0.0
+
+        """
+        schema = schema or self.default_schema_name
+        return self.dialect._get_foreign_table_names(self.bind, schema)
+

 class CreateEnumType(schema._CreateDropBase):
     __visit_name__ = "create_enum_type"

@@ -1461,21 +1917,25 @@ class DropEnumType(schema._CreateDropBase):


 class PGExecutionContext(default.DefaultExecutionContext):
+
     def fire_sequence(self, seq, type_):
-        return self._execute_scalar(("select nextval('%s')" % \
-            self.dialect.identifier_preparer.format_sequence(seq)), type_)
+        return self._execute_scalar((
+            "select nextval('%s')" %
+            self.dialect.identifier_preparer.format_sequence(seq)), type_)

     def get_insert_default(self, column):
-        if column.primary_key and column is column.table._autoincrement_column:
+        if column.primary_key and \
+                column is column.table._autoincrement_column:
             if column.server_default and column.server_default.has_argument:
                 # pre-execute passive defaults on primary key columns
                 return self._execute_scalar("select %s" %
-                                        column.server_default.arg, column.type)
+                                            column.server_default.arg,
+                                            column.type)

             elif (column.default is None or
-                        (column.default.is_sequence and
-                        column.default.optional)):
+                  (column.default.is_sequence and
+                   column.default.optional)):

                 # execute the sequence associated with a SERIAL primary
                 # key column.
for non-primary-key SERIAL, the ID just @@ -1494,10 +1954,10 @@ def get_insert_default(self, column): sch = column.table.schema if sch is not None: exc = "select nextval('\"%s\".\"%s\"')" % \ - (sch, seq_name) + (sch, seq_name) else: exc = "select nextval('\"%s\"')" % \ - (seq_name, ) + (seq_name, ) return self._execute_scalar(exc, column.type) @@ -1538,10 +1998,16 @@ class PGDialect(default.DefaultDialect): (schema.Index, { "using": False, "where": None, - "ops": {} + "ops": {}, + "concurrently": False, + "with": {} }), (schema.Table, { - "ignore_search_path": False + "ignore_search_path": False, + "tablespace": None, + "with_oids": None, + "on_commit": None, + "inherits": None }) ] @@ -1550,7 +2016,7 @@ class PGDialect(default.DefaultDialect): _backslash_escapes = True def __init__(self, isolation_level=None, json_serializer=None, - json_deserializer=None, **kwargs): + json_deserializer=None, **kwargs): default.DefaultDialect.__init__(self, **kwargs) self.isolation_level = isolation_level self._json_deserializer = json_deserializer @@ -1559,7 +2025,7 @@ def __init__(self, isolation_level=None, json_serializer=None, def initialize(self, connection): super(PGDialect, self).initialize(connection) self.implicit_returning = self.server_version_info > (8, 2) and \ - self.__dict__.get('implicit_returning', True) + self.__dict__.get('implicit_returning', True) self.supports_native_enum = self.server_version_info >= (8, 3) if not self.supports_native_enum: self.colspecs = self.colspecs.copy() @@ -1572,9 +2038,9 @@ def initialize(self, connection): self.supports_smallserial = self.server_version_info >= (9, 2) self._backslash_escapes = self.server_version_info < (8, 2) or \ - connection.scalar( - "show standard_conforming_strings" - ) == 'off' + connection.scalar( + "show standard_conforming_strings" + ) == 'off' def on_connect(self): if self.isolation_level is not None: @@ -1584,8 +2050,8 @@ def connect(conn): else: return None - _isolation_lookup = set(['SERIALIZABLE', - 'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ']) + _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED', + 'READ COMMITTED', 'REPEATABLE READ']) def set_isolation_level(self, connection, level): level = level.replace('_', ' ') @@ -1594,7 +2060,7 @@ def set_isolation_level(self, connection, level): "Invalid value '%s' for isolation_level. " "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) - ) + ) cursor = connection.cursor() cursor.execute( "SET SESSION CHARACTERISTICS AS TRANSACTION " @@ -1616,10 +2082,10 @@ def do_prepare_twophase(self, connection, xid): connection.execute("PREPARE TRANSACTION '%s'" % xid) def do_rollback_twophase(self, connection, xid, - is_prepared=True, recover=False): + is_prepared=True, recover=False): if is_prepared: if recover: - #FIXME: ugly hack to get out of transaction + # FIXME: ugly hack to get out of transaction # context when committing recoverable transactions # Must find out a way how to make the dbapi not # open a transaction. 
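As a quick illustration of the dialect-level options handled in ``PGDialect.__init__`` above, ``isolation_level``, ``json_serializer`` and ``json_deserializer`` are normally passed through :func:`.create_engine`; a minimal sketch only, with a placeholder URL and credentials::

    import json

    from sqlalchemy import create_engine

    # The level must be one of the values in _isolation_lookup; the JSON
    # hooks replace the default json.dumps / json.loads used by the dialect.
    engine = create_engine(
        "postgresql://scott:tiger@localhost/test",
        isolation_level="REPEATABLE READ",
        json_serializer=json.dumps,
        json_deserializer=json.loads,
    )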
@@ -1631,7 +2097,7 @@ def do_rollback_twophase(self, connection, xid, self.do_rollback(connection.connection) def do_commit_twophase(self, connection, xid, - is_prepared=True, recover=False): + is_prepared=True, recover=False): if is_prepared: if recover: connection.execute("ROLLBACK") @@ -1643,14 +2109,15 @@ def do_commit_twophase(self, connection, xid, def do_recover_twophase(self, connection): resultset = connection.execute( - sql.text("SELECT gid FROM pg_prepared_xacts")) + sql.text("SELECT gid FROM pg_prepared_xacts")) return [row[0] for row in resultset] def _get_default_schema_name(self, connection): return connection.scalar("select current_schema()") def has_schema(self, connection, schema): - query = "select nspname from pg_namespace where lower(nspname)=:schema" + query = ("select nspname from pg_namespace " + "where lower(nspname)=:schema") cursor = connection.execute( sql.text( query, @@ -1668,25 +2135,28 @@ def has_table(self, connection, table_name, schema=None): if schema is None: cursor = connection.execute( sql.text( - "select relname from pg_class c join pg_namespace n on " - "n.oid=c.relnamespace where n.nspname=current_schema() and " - "relname=:name", - bindparams=[ + "select relname from pg_class c join pg_namespace n on " + "n.oid=c.relnamespace where " + "pg_catalog.pg_table_is_visible(c.oid) " + "and relname=:name", + bindparams=[ sql.bindparam('name', util.text_type(table_name), - type_=sqltypes.Unicode)] + type_=sqltypes.Unicode)] ) ) else: cursor = connection.execute( sql.text( - "select relname from pg_class c join pg_namespace n on " - "n.oid=c.relnamespace where n.nspname=:schema and " - "relname=:name", + "select relname from pg_class c join pg_namespace n on " + "n.oid=c.relnamespace where n.nspname=:schema and " + "relname=:name", bindparams=[ sql.bindparam('name', - util.text_type(table_name), type_=sqltypes.Unicode), + util.text_type(table_name), + type_=sqltypes.Unicode), sql.bindparam('schema', - util.text_type(schema), type_=sqltypes.Unicode)] + util.text_type(schema), + type_=sqltypes.Unicode)] ) ) return bool(cursor.first()) @@ -1701,23 +2171,24 @@ def has_sequence(self, connection, sequence_name, schema=None): "and relname=:name", bindparams=[ sql.bindparam('name', util.text_type(sequence_name), - type_=sqltypes.Unicode) + type_=sqltypes.Unicode) ] ) ) else: cursor = connection.execute( sql.text( - "SELECT relname FROM pg_class c join pg_namespace n on " - "n.oid=c.relnamespace where relkind='S' and " - "n.nspname=:schema and relname=:name", - bindparams=[ - sql.bindparam('name', util.text_type(sequence_name), - type_=sqltypes.Unicode), - sql.bindparam('schema', - util.text_type(schema), type_=sqltypes.Unicode) - ] - ) + "SELECT relname FROM pg_class c join pg_namespace n on " + "n.oid=c.relnamespace where relkind='S' and " + "n.nspname=:schema and relname=:name", + bindparams=[ + sql.bindparam('name', util.text_type(sequence_name), + type_=sqltypes.Unicode), + sql.bindparam('schema', + util.text_type(schema), + type_=sqltypes.Unicode) + ] + ) ) return bool(cursor.first()) @@ -1743,14 +2214,14 @@ def has_type(self, connection, type_name, schema=None): """ query = sql.text(query) query = query.bindparams( - sql.bindparam('typname', - util.text_type(type_name), type_=sqltypes.Unicode), - ) + sql.bindparam('typname', + util.text_type(type_name), type_=sqltypes.Unicode), + ) if schema is not None: query = query.bindparams( - sql.bindparam('nspname', - util.text_type(schema), type_=sqltypes.Unicode), - ) + sql.bindparam('nspname', + 
util.text_type(schema), type_=sqltypes.Unicode), + ) cursor = connection.execute(query) return bool(cursor.scalar()) @@ -1762,7 +2233,7 @@ def _get_server_version_info(self, connection): v) if not m: raise AssertionError( - "Could not determine version from string '%s'" % v) + "Could not determine version from string '%s'" % v) return tuple([int(x) for x in m.group(1, 2, 3) if x is not None]) @reflection.cache @@ -1784,7 +2255,7 @@ def get_table_oid(self, connection, table_name, schema=None, **kw): FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE (%s) - AND c.relname = :table_name AND c.relkind in ('r','v') + AND c.relname = :table_name AND c.relkind in ('r', 'v', 'm', 'f') """ % schema_where_clause # Since we're binding to unicode, table_name and schema_name must be # unicode. @@ -1812,11 +2283,11 @@ def get_schema_names(self, connection, **kw): # what about system tables? if util.py2k: - schema_names = [row[0].decode(self.encoding) for row in rp \ - if not row[0].startswith('pg_')] + schema_names = [row[0].decode(self.encoding) for row in rp + if not row[0].startswith('pg_')] else: - schema_names = [row[0] for row in rp \ - if not row[0].startswith('pg_')] + schema_names = [row[0] for row in rp + if not row[0].startswith('pg_')] return schema_names @reflection.cache @@ -1828,12 +2299,30 @@ def get_table_names(self, connection, schema=None, **kw): result = connection.execute( sql.text("SELECT relname FROM pg_class c " - "WHERE relkind = 'r' " - "AND '%s' = (select nspname from pg_namespace n " - "where n.oid = c.relnamespace) " % - current_schema, - typemap={'relname': sqltypes.Unicode} - ) + "WHERE relkind = 'r' " + "AND '%s' = (select nspname from pg_namespace n " + "where n.oid = c.relnamespace) " % + current_schema, + typemap={'relname': sqltypes.Unicode} + ) + ) + return [row[0] for row in result] + + @reflection.cache + def _get_foreign_table_names(self, connection, schema=None, **kw): + if schema is not None: + current_schema = schema + else: + current_schema = self.default_schema_name + + result = connection.execute( + sql.text("SELECT relname FROM pg_class c " + "WHERE relkind = 'f' " + "AND '%s' = (select nspname from pg_namespace n " + "where n.oid = c.relnamespace) " % + current_schema, + typemap={'relname': sqltypes.Unicode} + ) ) return [row[0] for row in result] @@ -1846,14 +2335,14 @@ def get_view_names(self, connection, schema=None, **kw): s = """ SELECT relname FROM pg_class c - WHERE relkind = 'v' + WHERE relkind IN ('m', 'v') AND '%(schema)s' = (select nspname from pg_namespace n where n.oid = c.relnamespace) """ % dict(schema=current_schema) if util.py2k: view_names = [row[0].decode(self.encoding) - for row in connection.execute(s)] + for row in connection.execute(s)] else: view_names = [row[0] for row in connection.execute(s)] return view_names @@ -1898,13 +2387,21 @@ def get_columns(self, connection, table_name, schema=None, **kw): ORDER BY a.attnum """ s = sql.text(SQL_COLS, - bindparams=[sql.bindparam('table_oid', type_=sqltypes.Integer)], - typemap={'attname': sqltypes.Unicode, 'default': sqltypes.Unicode} - ) + bindparams=[ + sql.bindparam('table_oid', type_=sqltypes.Integer)], + typemap={ + 'attname': sqltypes.Unicode, + 'default': sqltypes.Unicode} + ) c = connection.execute(s, table_oid=table_oid) rows = c.fetchall() domains = self._load_domains(connection) - enums = self._load_enums(connection) + enums = dict( + ( + "%s.%s" % (rec['schema'], rec['name']) + if not rec['visible'] else rec['name'], rec) for rec in + 
self._load_enums(connection, schema='*') + ) # format columns columns = [] @@ -1916,7 +2413,7 @@ def get_columns(self, connection, table_name, schema=None, **kw): def _get_column_info(self, name, format_type, default, notnull, domains, enums, schema): - ## strip (*) from character varying(5), timestamp(5) + # strip (*) from character varying(5), timestamp(5) # with time zone, geometry(POLYGON), etc. attype = re.sub(r'\(.*\)', '', format_type) @@ -1964,7 +2461,7 @@ def _get_column_info(self, name, format_type, default, else: args = () elif attype in ('interval', 'interval year to month', - 'interval day to second'): + 'interval day to second'): if charlen: kwargs['precision'] = int(charlen) args = () @@ -1978,10 +2475,9 @@ def _get_column_info(self, name, format_type, default, elif attype in enums: enum = enums[attype] coltype = ENUM - if "." in attype: - kwargs['schema'], kwargs['name'] = attype.split('.') - else: - kwargs['name'] = attype + kwargs['name'] = enum['name'] + if not enum['visible']: + kwargs['schema'] = enum['schema'] args = tuple(enum['labels']) break elif attype in domains: @@ -2019,8 +2515,8 @@ def _get_column_info(self, name, format_type, default, # later be enhanced to obey quoting rules / # "quote schema" default = match.group(1) + \ - ('"%s"' % sch) + '.' + \ - match.group(2) + match.group(3) + ('"%s"' % sch) + '.' + \ + match.group(2) + match.group(3) column_info = dict(name=name, type=coltype, nullable=nullable, default=default, autoincrement=autoincrement) @@ -2076,7 +2572,7 @@ def get_pk_constraint(self, connection, table_name, schema=None, **kw): @reflection.cache def get_foreign_keys(self, connection, table_name, schema=None, - postgresql_ignore_search_path=False, **kw): + postgresql_ignore_search_path=False, **kw): preparer = self.identifier_preparer table_oid = self.get_table_oid(connection, table_name, schema, info_cache=kw.get('info_cache')) @@ -2099,29 +2595,32 @@ def get_foreign_keys(self, connection, table_name, schema=None, FK_REGEX = re.compile( r'FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)' r'[\s]?(MATCH (FULL|PARTIAL|SIMPLE)+)?' - r'[\s]?(ON UPDATE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?' - r'[\s]?(ON DELETE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?' + r'[\s]?(ON UPDATE ' + r'(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?' + r'[\s]?(ON DELETE ' + r'(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?' r'[\s]?(DEFERRABLE|NOT DEFERRABLE)?' r'[\s]?(INITIALLY (DEFERRED|IMMEDIATE)+)?' ) t = sql.text(FK_SQL, typemap={ - 'conname': sqltypes.Unicode, - 'condef': sqltypes.Unicode}) + 'conname': sqltypes.Unicode, + 'condef': sqltypes.Unicode}) c = connection.execute(t, table=table_oid) fkeys = [] for conname, condef, conschema in c.fetchall(): m = re.search(FK_REGEX, condef).groups() constrained_columns, referred_schema, \ - referred_table, referred_columns, \ - _, match, _, onupdate, _, ondelete, \ - deferrable, _, initially = m + referred_table, referred_columns, \ + _, match, _, onupdate, _, ondelete, \ + deferrable, _, initially = m if deferrable is not None: deferrable = True if deferrable == 'DEFERRABLE' else False constrained_columns = [preparer._unquote_identifier(x) - for x in re.split(r'\s*,\s*', constrained_columns)] + for x in re.split( + r'\s*,\s*', constrained_columns)] if postgresql_ignore_search_path: # when ignoring search path, we use the actual schema @@ -2135,7 +2634,7 @@ def get_foreign_keys(self, connection, table_name, schema=None, # pg_get_constraintdef(). 
If the schema is in the search # path, pg_get_constraintdef() will give us None. referred_schema = \ - preparer._unquote_identifier(referred_schema) + preparer._unquote_identifier(referred_schema) elif schema is not None and schema == conschema: # If the actual schema matches the schema of the table # we're reflecting, then we will use that. @@ -2143,7 +2642,8 @@ def get_foreign_keys(self, connection, table_name, schema=None, referred_table = preparer._unquote_identifier(referred_table) referred_columns = [preparer._unquote_identifier(x) - for x in re.split(r'\s*,\s', referred_columns)] + for x in + re.split(r'\s*,\s', referred_columns)] fkey_d = { 'name': conname, 'constrained_columns': constrained_columns, @@ -2170,9 +2670,9 @@ def _pg_index_any(self, col, compare_to): # for now. # regards, tom lane" return "(%s)" % " OR ".join( - "%s[%d] = %s" % (compare_to, ind, col) - for ind in range(0, 10) - ) + "%s[%d] = %s" % (compare_to, ind, col) + for ind in range(0, 10) + ) else: return "%s = ANY(%s)" % (col, compare_to) @@ -2184,32 +2684,69 @@ def get_indexes(self, connection, table_name, schema, **kw): # cast indkey as varchar since it's an int2vector, # returned as a list by some drivers such as pypostgresql - IDX_SQL = """ - SELECT - i.relname as relname, - ix.indisunique, ix.indexprs, ix.indpred, - a.attname, a.attnum, ix.indkey%s - FROM - pg_class t - join pg_index ix on t.oid = ix.indrelid - join pg_class i on i.oid=ix.indexrelid - left outer join - pg_attribute a - on t.oid=a.attrelid and %s - WHERE - t.relkind = 'r' - and t.oid = :table_oid - and ix.indisprimary = 'f' - ORDER BY - t.relname, - i.relname - """ % ( + if self.server_version_info < (8, 5): + IDX_SQL = """ + SELECT + i.relname as relname, + ix.indisunique, ix.indexprs, ix.indpred, + a.attname, a.attnum, NULL, ix.indkey%s, + %s, am.amname + FROM + pg_class t + join pg_index ix on t.oid = ix.indrelid + join pg_class i on i.oid = ix.indexrelid + left outer join + pg_attribute a + on t.oid = a.attrelid and %s + left outer join + pg_am am + on i.relam = am.oid + WHERE + t.relkind IN ('r', 'v', 'f', 'm') + and t.oid = :table_oid + and ix.indisprimary = 'f' + ORDER BY + t.relname, + i.relname + """ % ( # version 8.3 here was based on observing the # cast does not work in PG 8.2.4, does work in 8.3.0. # nothing in PG changelogs regarding this. 
"::varchar" if self.server_version_info >= (8, 3) else "", + "i.reloptions" if self.server_version_info >= (8, 2) + else "NULL", self._pg_index_any("a.attnum", "ix.indkey") ) + else: + IDX_SQL = """ + SELECT + i.relname as relname, + ix.indisunique, ix.indexprs, ix.indpred, + a.attname, a.attnum, c.conrelid, ix.indkey::varchar, + i.reloptions, am.amname + FROM + pg_class t + join pg_index ix on t.oid = ix.indrelid + join pg_class i on i.oid = ix.indexrelid + left outer join + pg_attribute a + on t.oid = a.attrelid and a.attnum = ANY(ix.indkey) + left outer join + pg_constraint c + on (ix.indrelid = c.conrelid and + ix.indexrelid = c.conindid and + c.contype in ('p', 'u', 'x')) + left outer join + pg_am am + on i.relam = am.oid + WHERE + t.relkind IN ('r', 'v', 'f', 'm') + and t.oid = :table_oid + and ix.indisprimary = 'f' + ORDER BY + t.relname, + i.relname + """ t = sql.text(IDX_SQL, typemap={'attname': sqltypes.Unicode}) c = connection.execute(t, table_oid=table_oid) @@ -2218,35 +2755,61 @@ def get_indexes(self, connection, table_name, schema, **kw): sv_idx_name = None for row in c.fetchall(): - idx_name, unique, expr, prd, col, col_num, idx_key = row + (idx_name, unique, expr, prd, col, + col_num, conrelid, idx_key, options, amname) = row if expr: if idx_name != sv_idx_name: util.warn( - "Skipped unsupported reflection of " - "expression-based index %s" - % idx_name) + "Skipped unsupported reflection of " + "expression-based index %s" + % idx_name) sv_idx_name = idx_name continue if prd and not idx_name == sv_idx_name: util.warn( - "Predicate of partial index %s ignored during reflection" - % idx_name) + "Predicate of partial index %s ignored during reflection" + % idx_name) sv_idx_name = idx_name + has_idx = idx_name in indexes index = indexes[idx_name] if col is not None: index['cols'][col_num] = col - index['key'] = [int(k.strip()) for k in idx_key.split()] - index['unique'] = unique - - return [ - {'name': name, - 'unique': idx['unique'], - 'column_names': [idx['cols'][i] for i in idx['key']]} - for name, idx in indexes.items() - ] + if not has_idx: + index['key'] = [int(k.strip()) for k in idx_key.split()] + index['unique'] = unique + if conrelid is not None: + index['duplicates_constraint'] = idx_name + if options: + index['options'] = dict( + [option.split("=") for option in options]) + + # it *might* be nice to include that this is 'btree' in the + # reflection info. But we don't want an Index object + # to have a ``postgresql_using`` in it that is just the + # default, so for the moment leaving this out. 
+ if amname and amname != 'btree': + index['amname'] = amname + + result = [] + for name, idx in indexes.items(): + entry = { + 'name': name, + 'unique': idx['unique'], + 'column_names': [idx['cols'][i] for i in idx['key']] + } + if 'duplicates_constraint' in idx: + entry['duplicates_constraint'] = idx['duplicates_constraint'] + if 'options' in idx: + entry.setdefault( + 'dialect_options', {})["postgresql_with"] = idx['options'] + if 'amname' in idx: + entry.setdefault( + 'dialect_options', {})["postgresql_using"] = idx['amname'] + result.append(entry) + return result @reflection.cache def get_unique_constraints(self, connection, table_name, @@ -2263,7 +2826,8 @@ def get_unique_constraints(self, connection, table_name, FROM pg_catalog.pg_constraint cons join pg_attribute a - on cons.conrelid = a.attrelid AND a.attnum = ANY(cons.conkey) + on cons.conrelid = a.attrelid AND + a.attnum = ANY(cons.conkey) WHERE cons.conrelid = :table_oid AND cons.contype = 'u' @@ -2284,11 +2848,12 @@ def get_unique_constraints(self, connection, table_name, for name, uc in uniques.items() ] - def _load_enums(self, connection): + def _load_enums(self, connection, schema=None): + schema = schema or self.default_schema_name if not self.supports_native_enum: return {} - ## Load data types for enums: + # Load data types for enums: SQL_ENUMS = """ SELECT t.typname as "name", -- no enum defaults in 8.4 at least @@ -2300,36 +2865,42 @@ def _load_enums(self, connection): LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace LEFT JOIN pg_catalog.pg_enum e ON t.oid = e.enumtypid WHERE t.typtype = 'e' - ORDER BY "name", e.oid -- e.oid gives us label order """ + if schema != '*': + SQL_ENUMS += "AND n.nspname = :schema " + + # e.oid gives us label order within an enum + SQL_ENUMS += 'ORDER BY "schema", "name", e.oid' + s = sql.text(SQL_ENUMS, typemap={ - 'attname': sqltypes.Unicode, - 'label': sqltypes.Unicode}) + 'attname': sqltypes.Unicode, + 'label': sqltypes.Unicode}) + + if schema != '*': + s = s.bindparams(schema=schema) + c = connection.execute(s) - enums = {} + enums = [] + enum_by_name = {} for enum in c.fetchall(): - if enum['visible']: - # 'visible' just means whether or not the enum is in a - # schema that's on the search path -- or not overridden by - # a schema with higher precedence. If it's not visible, - # it will be prefixed with the schema-name when it's used. 
- name = enum['name'] - else: - name = "%s.%s" % (enum['schema'], enum['name']) - - if name in enums: - enums[name]['labels'].append(enum['label']) + key = (enum['schema'], enum['name']) + if key in enum_by_name: + enum_by_name[key]['labels'].append(enum['label']) else: - enums[name] = { - 'labels': [enum['label']], - } + enum_by_name[key] = enum_rec = { + 'name': enum['name'], + 'schema': enum['schema'], + 'visible': enum['visible'], + 'labels': [enum['label']], + } + enums.append(enum_rec) return enums def _load_domains(self, connection): - ## Load data types for domains: + # Load data types for domains: SQL_DOMAINS = """ SELECT t.typname as "name", pg_catalog.format_type(t.typbasetype, t.typtypmod) as "attype", @@ -2347,7 +2918,7 @@ def _load_domains(self, connection): domains = {} for domain in c.fetchall(): - ## strip (30) from character varying(30) + # strip (30) from character varying(30) attype = re.search('([^\(]+)', domain['attype']).group(1) if domain['visible']: # 'visible' just means whether or not the domain is in a @@ -2359,9 +2930,9 @@ def _load_domains(self, connection): name = "%s.%s" % (domain['schema'], domain['name']) domains[name] = { - 'attype': attype, - 'nullable': domain['nullable'], - 'default': domain['default'] - } + 'attype': attype, + 'nullable': domain['nullable'], + 'default': domain['default'] + } return domains diff --git a/lib/sqlalchemy/dialects/postgresql/constraints.py b/lib/sqlalchemy/dialects/postgresql/constraints.py index f45cef1a29..c6bb890946 100644 --- a/lib/sqlalchemy/dialects/postgresql/constraints.py +++ b/lib/sqlalchemy/dialects/postgresql/constraints.py @@ -1,9 +1,12 @@ -# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2013-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from sqlalchemy.schema import ColumnCollectionConstraint -from sqlalchemy.sql import expression +from ...sql.schema import ColumnCollectionConstraint +from ...sql import expression +from ... import util + class ExcludeConstraint(ColumnCollectionConstraint): """A table-level EXCLUDE constraint. @@ -11,7 +14,8 @@ class ExcludeConstraint(ColumnCollectionConstraint): Defines an EXCLUDE constraint as described in the `postgres documentation`__. - __ http://www.postgresql.org/docs/9.0/static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE + __ http://www.postgresql.org/docs/9.0/\ +static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE """ __visit_name__ = 'exclude_constraint' @@ -45,29 +49,50 @@ def __init__(self, *elements, **kw): for this constraint. 
""" + columns = [] + render_exprs = [] + self.operators = {} + + expressions, operators = zip(*elements) + + for (expr, column, strname, add_element), operator in zip( + self._extract_col_expression_collection(expressions), + operators + ): + if add_element is not None: + columns.append(add_element) + + name = column.name if column is not None else strname + + if name is not None: + # backwards compat + self.operators[name] = operator + + expr = expression._literal_as_text(expr) + + render_exprs.append( + (expr, name, operator) + ) + + self._render_exprs = render_exprs ColumnCollectionConstraint.__init__( self, - *[col for col, op in elements], + *columns, name=kw.get('name'), deferrable=kw.get('deferrable'), initially=kw.get('initially') - ) - self.operators = {} - for col_or_string, op in elements: - name = getattr(col_or_string, 'name', col_or_string) - self.operators[name] = op + ) self.using = kw.get('using', 'gist') where = kw.get('where') - if where: - self.where = expression._literal_as_text(where) + if where is not None: + self.where = expression._literal_as_text(where) def copy(self, **kw): elements = [(col, self.operators[col]) for col in self.columns.keys()] c = self.__class__(*elements, - name=self.name, - deferrable=self.deferrable, - initially=self.initially) + name=self.name, + deferrable=self.deferrable, + initially=self.initially) c.dispatch._update(self.dispatch) return c - diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py index 76562088dd..a4ff461866 100644 --- a/lib/sqlalchemy/dialects/postgresql/hstore.py +++ b/lib/sqlalchemy/dialects/postgresql/hstore.py @@ -1,5 +1,6 @@ # postgresql/hstore.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -52,7 +53,7 @@ def _parse_error(hstore_str, pos): def _parse_hstore(hstore_str): - """Parse an hstore from it's literal string representation. + """Parse an hstore from its literal string representation. Attempts to approximate PG's hstore input parsing rules as closely as possible. Although currently this is not strictly necessary, since the @@ -68,11 +69,13 @@ def _parse_hstore(hstore_str): pair_match = HSTORE_PAIR_RE.match(hstore_str) while pair_match is not None: - key = pair_match.group('key').replace(r'\"', '"').replace("\\\\", "\\") + key = pair_match.group('key').replace(r'\"', '"').replace( + "\\\\", "\\") if pair_match.group('value_null'): value = None else: - value = pair_match.group('value').replace(r'\"', '"').replace("\\\\", "\\") + value = pair_match.group('value').replace( + r'\"', '"').replace("\\\\", "\\") result[key] = value pos += pair_match.end() @@ -139,15 +142,16 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine): data_table.c.data + {"k1": "v1"} - For a full list of special methods see :class:`.HSTORE.comparator_factory`. + For a full list of special methods see + :class:`.HSTORE.comparator_factory`. For usage with the SQLAlchemy ORM, it may be desirable to combine the usage of :class:`.HSTORE` with :class:`.MutableDict` dictionary now part of the :mod:`sqlalchemy.ext.mutable` extension. This extension will allow "in-place" changes to the dictionary, e.g. 
addition of new keys or replacement/removal of existing - keys to/from the current dictionary, to produce events which will be detected - by the unit of work:: + keys to/from the current dictionary, to produce events which will be + detected by the unit of work:: from sqlalchemy.ext.mutable import MutableDict @@ -166,9 +170,9 @@ class MyClass(Base): session.commit() When the :mod:`sqlalchemy.ext.mutable` extension is not used, the ORM - will not be alerted to any changes to the contents of an existing dictionary, - unless that dictionary value is re-assigned to the HSTORE-attribute itself, - thus generating a change event. + will not be alerted to any changes to the contents of an existing + dictionary, unless that dictionary value is re-assigned to the + HSTORE-attribute itself, thus generating a change event. .. versionadded:: 0.8 @@ -180,6 +184,7 @@ class MyClass(Base): """ __visit_name__ = 'HSTORE' + hashable = False class comparator_factory(sqltypes.Concatenable.Comparator): """Define comparison operations for :class:`.HSTORE`.""" @@ -270,6 +275,7 @@ def _adapt_expression(self, op, other_comparator): def bind_processor(self, dialect): if util.py2k: encoding = dialect.encoding + def process(value): if isinstance(value, dict): return _serialize_hstore(value).encode(encoding) @@ -286,6 +292,7 @@ def process(value): def result_processor(self, dialect, coltype): if util.py2k: encoding = dialect.encoding + def process(value): if value is not None: return _parse_hstore(value.decode(encoding)) diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index 2e29185e8a..f7ede85c9e 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -1,5 +1,6 @@ # postgresql/json.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -11,10 +12,10 @@ from ... import types as sqltypes from ...sql.operators import custom_op from ... import sql -from ...sql import elements +from ...sql import elements, default_comparator from ... import util -__all__ = ('JSON', 'JSONElement') +__all__ = ('JSON', 'JSONElement', 'JSONB') class JSONElement(elements.BinaryExpression): @@ -26,24 +27,29 @@ class JSONElement(elements.BinaryExpression): expr = mytable.c.json_data['some_key'] The expression typically compiles to a JSON access such as ``col -> key``. - Modifiers are then available for typing behavior, including :meth:`.JSONElement.cast` - and :attr:`.JSONElement.astext`. + Modifiers are then available for typing behavior, including + :meth:`.JSONElement.cast` and :attr:`.JSONElement.astext`. 
""" - def __init__(self, left, right, astext=False, opstring=None, result_type=None): + + def __init__(self, left, right, astext=False, + opstring=None, result_type=None): self._astext = astext if opstring is None: if hasattr(right, '__iter__') and \ - not isinstance(right, util.string_types): + not isinstance(right, util.string_types): opstring = "#>" - right = "{%s}" % (", ".join(util.text_type(elem) for elem in right)) + right = "{%s}" % ( + ", ".join(util.text_type(elem) for elem in right)) else: opstring = "->" self._json_opstring = opstring operator = custom_op(opstring, precedence=5) - right = left._check_literal(left, operator, right) - super(JSONElement, self).__init__(left, right, operator, type_=result_type) + right = default_comparator._check_literal( + left, operator, right) + super(JSONElement, self).__init__( + left, right, operator, type_=result_type) @property def astext(self): @@ -63,16 +69,16 @@ def astext(self): return self else: return JSONElement( - self.left, - self.right, - astext=True, - opstring=self._json_opstring + ">", - result_type=sqltypes.String(convert_unicode=True) - ) + self.left, + self.right, + astext=True, + opstring=self._json_opstring + ">", + result_type=sqltypes.String(convert_unicode=True) + ) def cast(self, type_): """Convert this :class:`.JSONElement` to apply both the 'astext' operator - as well as an explicit type cast when evaulated. + as well as an explicit type cast when evaluated. E.g.:: @@ -125,15 +131,16 @@ class JSON(sqltypes.TypeEngine): * Path index operations returning text (required for text comparison):: - data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == 'some value' + data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == \\ + 'some value' - Index operations return an instance of :class:`.JSONElement`, which represents - an expression such as ``column -> index``. This element then defines - methods such as :attr:`.JSONElement.astext` and :meth:`.JSONElement.cast` - for setting up type behavior. + Index operations return an instance of :class:`.JSONElement`, which + represents an expression such as ``column -> index``. This element then + defines methods such as :attr:`.JSONElement.astext` and + :meth:`.JSONElement.cast` for setting up type behavior. - The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not detect - in-place mutations to the structure. In order to detect these, the + The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not + detect in-place mutations to the structure. In order to detect these, the :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will allow "in-place" changes to the datastructure to produce events which will be detected by the unit of work. See the example at :class:`.HSTORE` @@ -158,6 +165,23 @@ class JSON(sqltypes.TypeEngine): __visit_name__ = 'JSON' + def __init__(self, none_as_null=False): + """Construct a :class:`.JSON` type. + + :param none_as_null: if True, persist the value ``None`` as a + SQL NULL value, not the JSON encoding of ``null``. Note that + when this flag is False, the :func:`.null` construct can still + be used to persist a NULL value:: + + from sqlalchemy import null + conn.execute(table.insert(), data=null()) + + .. versionchanged:: 0.9.8 - Added ``none_as_null``, and :func:`.null` + is now supported in order to persist a NULL value. 
+ + """ + self.none_as_null = none_as_null + class comparator_factory(sqltypes.Concatenable.Comparator): """Define comparison operations for :class:`.JSON`.""" @@ -177,10 +201,19 @@ def bind_processor(self, dialect): json_serializer = dialect._json_serializer or json.dumps if util.py2k: encoding = dialect.encoding + def process(value): + if isinstance(value, elements.Null) or ( + value is None and self.none_as_null + ): + return None return json_serializer(value).encode(encoding) else: def process(value): + if isinstance(value, elements.Null) or ( + value is None and self.none_as_null + ): + return None return json_serializer(value) return process @@ -188,12 +221,138 @@ def result_processor(self, dialect, coltype): json_deserializer = dialect._json_deserializer or json.loads if util.py2k: encoding = dialect.encoding + def process(value): + if value is None: + return None return json_deserializer(value.decode(encoding)) else: def process(value): + if value is None: + return None return json_deserializer(value) return process ischema_names['json'] = JSON + + +class JSONB(JSON): + """Represent the Postgresql JSONB type. + + The :class:`.JSONB` type stores arbitrary JSONB format data, e.g.:: + + data_table = Table('data_table', metadata, + Column('id', Integer, primary_key=True), + Column('data', JSONB) + ) + + with engine.connect() as conn: + conn.execute( + data_table.insert(), + data = {"key1": "value1", "key2": "value2"} + ) + + :class:`.JSONB` provides several operations: + + * Index operations:: + + data_table.c.data['some key'] + + * Index operations returning text (required for text comparison):: + + data_table.c.data['some key'].astext == 'some value' + + * Index operations with a built-in CAST call:: + + data_table.c.data['some key'].cast(Integer) == 5 + + * Path index operations:: + + data_table.c.data[('key_1', 'key_2', ..., 'key_n')] + + * Path index operations returning text (required for text comparison):: + + data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == \\ + 'some value' + + Index operations return an instance of :class:`.JSONElement`, which + represents an expression such as ``column -> index``. This element then + defines methods such as :attr:`.JSONElement.astext` and + :meth:`.JSONElement.cast` for setting up type behavior. + + The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not + detect in-place mutations to the structure. In order to detect these, the + :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will + allow "in-place" changes to the datastructure to produce events which + will be detected by the unit of work. See the example at :class:`.HSTORE` + for a simple example involving a dictionary. + + Custom serializers and deserializers are specified at the dialect level, + that is using :func:`.create_engine`. The reason for this is that when + using psycopg2, the DBAPI only allows serializers at the per-cursor + or per-connection level. E.g.:: + + engine = create_engine("postgresql://scott:tiger@localhost/test", + json_serializer=my_serialize_fn, + json_deserializer=my_deserialize_fn + ) + + When using the psycopg2 dialect, the json_deserializer is registered + against the database using ``psycopg2.extras.register_default_json``. + + .. 
versionadded:: 0.9.7
+
+    """
+
+    __visit_name__ = 'JSONB'
+    hashable = False
+
+    class comparator_factory(sqltypes.Concatenable.Comparator):
+        """Define comparison operations for :class:`.JSON`."""
+
+        def __getitem__(self, other):
+            """Get the value at a given key."""
+
+            return JSONElement(self.expr, other)
+
+        def _adapt_expression(self, op, other_comparator):
+            # How does one do equality?? jsonb also has "=" eg.
+            # '[1,2,3]'::jsonb = '[1,2,3]'::jsonb
+            if isinstance(op, custom_op):
+                if op.opstring in ['?', '?&', '?|', '@>', '<@']:
+                    return op, sqltypes.Boolean
+                if op.opstring == '->':
+                    return op, sqltypes.Text
+            return sqltypes.Concatenable.Comparator.\
+                _adapt_expression(self, op, other_comparator)
+
+        def has_key(self, other):
+            """Boolean expression.  Test for presence of a key.  Note that the
+            key may be a SQLA expression.
+            """
+            return self.expr.op('?')(other)
+
+        def has_all(self, other):
+            """Boolean expression.  Test for presence of all keys in jsonb
+            """
+            return self.expr.op('?&')(other)
+
+        def has_any(self, other):
+            """Boolean expression.  Test for presence of any key in jsonb
+            """
+            return self.expr.op('?|')(other)
+
+        def contains(self, other, **kwargs):
+            """Boolean expression.  Test if keys (or array) are a superset
+            of / contain the keys of the argument jsonb expression.
+            """
+            return self.expr.op('@>')(other)
+
+        def contained_by(self, other):
+            """Boolean expression.  Test if keys are a proper subset of the
+            keys of the argument jsonb expression.
+            """
+            return self.expr.op('<@')(other)
+
+ischema_names['jsonb'] = JSONB
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index bc73f9757d..68e8e029dd 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -1,5 +1,6 @@
 # postgresql/pg8000.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -8,31 +9,70 @@
 .. dialect:: postgresql+pg8000
     :name: pg8000
     :dbapi: pg8000
-    :connectstring: postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...]
-    :url: http://pybrary.net/pg8000/
+    :connectstring: \
+postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...]
+    :url: https://pythonhosted.org/pg8000/
+
+
+.. _pg8000_unicode:

 Unicode
 -------

-pg8000 requires that the postgresql client encoding be
-configured in the postgresql.conf file in order to use encodings
-other than ascii. Set this value to the same value as the
-"encoding" parameter on create_engine(), usually "utf-8".
+pg8000 will encode / decode string values between it and the server using the
+PostgreSQL ``client_encoding`` parameter; by default this is the value in
+the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
+Typically, this can be changed to ``utf-8``, as a more useful default::
+
+    #client_encoding = sql_ascii # actually, defaults to database
+                                 # encoding
+    client_encoding = utf8
+
+The ``client_encoding`` can be overridden for a session by executing the SQL:
+
+SET CLIENT_ENCODING TO 'utf8';
+
+SQLAlchemy will execute this SQL on all new connections based on the value
+passed to :func:`.create_engine` using the ``client_encoding`` parameter::
+
+    engine = create_engine(
+        "postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8')
+
+
+..
_pg8000_isolation_level: + +pg8000 Transaction Isolation Level +------------------------------------- -Interval --------- +The pg8000 dialect offers the same isolation level settings as that +of the :ref:`psycopg2 ` dialect: + +* ``READ COMMITTED`` +* ``READ UNCOMMITTED`` +* ``REPEATABLE READ`` +* ``SERIALIZABLE`` +* ``AUTOCOMMIT`` + +.. versionadded:: 0.9.5 support for AUTOCOMMIT isolation level when using + pg8000. + +.. seealso:: + + :ref:`postgresql_isolation_level` + + :ref:`psycopg2_isolation_level` -Passing data from/to the Interval type is not supported as of -yet. """ from ... import util, exc import decimal from ... import processors from ... import types as sqltypes -from .base import PGDialect, \ - PGCompiler, PGIdentifierPreparer, PGExecutionContext,\ - _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES +from .base import ( + PGDialect, PGCompiler, PGIdentifierPreparer, PGExecutionContext, + _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES) +import re +from sqlalchemy.dialects.postgresql.json import JSON class _PGNumeric(sqltypes.Numeric): @@ -40,14 +80,13 @@ def result_processor(self, dialect, coltype): if self.asdecimal: if coltype in _FLOAT_TYPES: return processors.to_decimal_processor_factory( - decimal.Decimal, - self._effective_decimal_return_scale) + decimal.Decimal, self._effective_decimal_return_scale) elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: # pg8000 returns Decimal natively for 1700 return None else: raise exc.InvalidRequestError( - "Unknown PG numeric type: %d" % coltype) + "Unknown PG numeric type: %d" % coltype) else: if coltype in _FLOAT_TYPES: # pg8000 returns float natively for 701 @@ -56,7 +95,7 @@ def result_processor(self, dialect, coltype): return processors.to_float else: raise exc.InvalidRequestError( - "Unknown PG numeric type: %d" % coltype) + "Unknown PG numeric type: %d" % coltype) class _PGNumericNoBind(_PGNumeric): @@ -64,6 +103,15 @@ def bind_processor(self, dialect): return None +class _PGJSON(JSON): + + def result_processor(self, dialect, coltype): + if dialect._dbapi_version > (1, 10, 1): + return None # Has native JSON + else: + return super(_PGJSON, self).result_processor(dialect, coltype) + + class PGExecutionContext_pg8000(PGExecutionContext): pass @@ -71,7 +119,7 @@ class PGExecutionContext_pg8000(PGExecutionContext): class PGCompiler_pg8000(PGCompiler): def visit_mod_binary(self, binary, operator, **kw): return self.process(binary.left, **kw) + " %% " + \ - self.process(binary.right, **kw) + self.process(binary.right, **kw) def post_process_text(self, text): if '%%' in text: @@ -95,7 +143,7 @@ class PGDialect_pg8000(PGDialect): supports_unicode_binds = True default_paramstyle = 'format' - supports_sane_multi_rowcount = False + supports_sane_multi_rowcount = True execution_ctx_cls = PGExecutionContext_pg8000 statement_compiler = PGCompiler_pg8000 preparer = PGIdentifierPreparer_pg8000 @@ -105,13 +153,32 @@ class PGDialect_pg8000(PGDialect): PGDialect.colspecs, { sqltypes.Numeric: _PGNumericNoBind, - sqltypes.Float: _PGNumeric + sqltypes.Float: _PGNumeric, + JSON: _PGJSON, } ) + def __init__(self, client_encoding=None, **kwargs): + PGDialect.__init__(self, **kwargs) + self.client_encoding = client_encoding + + def initialize(self, connection): + self.supports_sane_multi_rowcount = self._dbapi_version >= (1, 9, 14) + super(PGDialect_pg8000, self).initialize(connection) + + @util.memoized_property + def _dbapi_version(self): + if self.dbapi and hasattr(self.dbapi, '__version__'): + return tuple( + [ + int(x) for x in re.findall( + 
r'(\d+)(?:[-\.]?|$)', self.dbapi.__version__)]) + else: + return (99, 99, 99) + @classmethod def dbapi(cls): - return __import__('pg8000').dbapi + return __import__('pg8000') def create_connect_args(self, url): opts = url.translate_connect_args(username='user') @@ -123,4 +190,75 @@ def create_connect_args(self, url): def is_disconnect(self, e, connection, cursor): return "connection is closed" in str(e) + def set_isolation_level(self, connection, level): + level = level.replace('_', ' ') + + # adjust for ConnectionFairy possibly being present + if hasattr(connection, 'connection'): + connection = connection.connection + + if level == 'AUTOCOMMIT': + connection.autocommit = True + elif level in self._isolation_lookup: + connection.autocommit = False + cursor = connection.cursor() + cursor.execute( + "SET SESSION CHARACTERISTICS AS TRANSACTION " + "ISOLATION LEVEL %s" % level) + cursor.execute("COMMIT") + cursor.close() + else: + raise exc.ArgumentError( + "Invalid value '%s' for isolation_level. " + "Valid isolation levels for %s are %s or AUTOCOMMIT" % + (level, self.name, ", ".join(self._isolation_lookup)) + ) + + def set_client_encoding(self, connection, client_encoding): + # adjust for ConnectionFairy possibly being present + if hasattr(connection, 'connection'): + connection = connection.connection + + cursor = connection.cursor() + cursor.execute("SET CLIENT_ENCODING TO '" + client_encoding + "'") + cursor.execute("COMMIT") + cursor.close() + + def do_begin_twophase(self, connection, xid): + connection.connection.tpc_begin((0, xid, '')) + + def do_prepare_twophase(self, connection, xid): + connection.connection.tpc_prepare() + + def do_rollback_twophase( + self, connection, xid, is_prepared=True, recover=False): + connection.connection.tpc_rollback((0, xid, '')) + + def do_commit_twophase( + self, connection, xid, is_prepared=True, recover=False): + connection.connection.tpc_commit((0, xid, '')) + + def do_recover_twophase(self, connection): + return [row[1] for row in connection.connection.tpc_recover()] + + def on_connect(self): + fns = [] + if self.client_encoding is not None: + def on_connect(conn): + self.set_client_encoding(conn, self.client_encoding) + fns.append(on_connect) + + if self.isolation_level is not None: + def on_connect(conn): + self.set_isolation_level(conn, self.isolation_level) + fns.append(on_connect) + + if len(fns) > 0: + def on_connect(conn): + for fn in fns: + fn(conn) + return on_connect + else: + return None + dialect = PGDialect_pg8000 diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index ac17706252..0614081090 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -1,5 +1,6 @@ # postgresql/psycopg2.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,7 +9,8 @@ .. dialect:: postgresql+psycopg2 :name: psycopg2 :dbapi: psycopg2 - :connectstring: postgresql+psycopg2://user:password@host:port/dbname[?key=value&key=value...] + :connectstring: postgresql+psycopg2://user:password@host:port/dbname\ +[?key=value&key=value...] 
:url: http://pypi.python.org/pypi/psycopg2/ psycopg2 Connect Arguments @@ -20,9 +22,9 @@ * ``server_side_cursors``: Enable the usage of "server side cursors" for SQL statements which support this feature. What this essentially means from a psycopg2 point of view is that the cursor is created using a name, e.g. - ``connection.cursor('some name')``, which has the effect that result rows are - not immediately pre-fetched and buffered after statement execution, but are - instead left on the server and only retrieved as needed. SQLAlchemy's + ``connection.cursor('some name')``, which has the effect that result rows + are not immediately pre-fetched and buffered after statement execution, but + are instead left on the server and only retrieved as needed. SQLAlchemy's :class:`~sqlalchemy.engine.ResultProxy` uses special row-buffering behavior when this feature is enabled, such that groups of 100 rows at a time are fetched over the wire to reduce conversational overhead. @@ -30,10 +32,25 @@ way of enabling this mode on a per-execution basis. * ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode per connection. True by default. -* ``isolation_level``: This option, available for all Posgtresql dialects, + + .. seealso:: + + :ref:`psycopg2_disable_native_unicode` + +* ``isolation_level``: This option, available for all PostgreSQL dialects, includes the ``AUTOCOMMIT`` isolation level when using the psycopg2 - dialect. See :ref:`psycopg2_isolation_level`. + dialect. + + .. seealso:: + :ref:`psycopg2_isolation_level` + +* ``client_encoding``: sets the client encoding in a libpq-agnostic way, + using psycopg2's ``set_client_encoding()`` method. + + .. seealso:: + + :ref:`psycopg2_unicode` Unix Domain Connections ------------------------ @@ -49,11 +66,15 @@ was built. This value can be overridden by passing a pathname to psycopg2, using ``host`` as an additional keyword argument:: - create_engine("postgresql+psycopg2://user:password@/dbname?host=/var/lib/postgresql") + create_engine("postgresql+psycopg2://user:password@/dbname?\ +host=/var/lib/postgresql") See also: -`PQconnectdbParams `_ +`PQconnectdbParams `_ + +.. _psycopg2_execution_options: Per-Statement/Connection Execution Options ------------------------------------------- @@ -62,18 +83,27 @@ :meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`, :meth:`.Query.execution_options`, in addition to those not specific to DBAPIs: -* isolation_level - Set the transaction isolation level for the lifespan of a +* ``isolation_level`` - Set the transaction isolation level for the lifespan of a :class:`.Connection` (can only be set on a connection, not a statement or query). See :ref:`psycopg2_isolation_level`. -* stream_results - Enable or disable usage of psycopg2 server side cursors - +* ``stream_results`` - Enable or disable usage of psycopg2 server side cursors - this feature makes use of "named" cursors in combination with special result handling methods so that result rows are not fully buffered. If ``None`` or not set, the ``server_side_cursors`` option of the :class:`.Engine` is used. -Unicode -------- +* ``max_row_buffer`` - when using ``stream_results``, an integer value that + specifies the maximum number of rows to buffer at a time. This is + interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the + buffer will grow to ultimately store 1000 rows at a time. + + .. versionadded:: 1.0.6 + +.. 
_psycopg2_unicode: + +Unicode with Psycopg2 +---------------------- By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE`` extension, such that the DBAPI receives and returns all strings as Python @@ -81,28 +111,54 @@ change. Psycopg2 here will encode/decode string values based on the current "client encoding" setting; by default this is the value in the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. -Typically, this can be changed to ``utf-8``, as a more useful default:: +Typically, this can be changed to ``utf8``, as a more useful default:: + + # postgresql.conf file - #client_encoding = sql_ascii # actually, defaults to database + # client_encoding = sql_ascii # actually, defaults to database # encoding client_encoding = utf8 A second way to affect the client encoding is to set it within Psycopg2 -locally. SQLAlchemy will call psycopg2's ``set_client_encoding()`` -method (see: http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding) +locally. SQLAlchemy will call psycopg2's +:meth:`psycopg2:connection.set_client_encoding` method on all new connections based on the value passed to :func:`.create_engine` using the ``client_encoding`` parameter:: - engine = create_engine("postgresql://user:pass@host/dbname", client_encoding='utf8') + # set_client_encoding() setting; + # works for *all* Postgresql versions + engine = create_engine("postgresql://user:pass@host/dbname", + client_encoding='utf8') This overrides the encoding specified in the Postgresql client configuration. +When using the parameter in this way, the psycopg2 driver emits +``SET client_encoding TO 'utf8'`` on the connection explicitly, and works +in all Postgresql versions. + +Note that the ``client_encoding`` setting as passed to :func:`.create_engine` +is **not the same** as the more recently added ``client_encoding`` parameter +now supported by libpq directly. This is enabled when ``client_encoding`` +is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed +using the :paramref:`.create_engine.connect_args` parameter:: + + # libpq direct parameter setting; + # only works for Postgresql **9.1 and above** + engine = create_engine("postgresql://user:pass@host/dbname", + connect_args={'client_encoding': 'utf8'}) + + # using the query string is equivalent + engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8") + +The above parameter was only added to libpq as of version 9.1 of Postgresql, +so using the previous method is better for cross-version support. -.. versionadded:: 0.7.3 - The psycopg2-specific ``client_encoding`` parameter to - :func:`.create_engine`. +.. _psycopg2_disable_native_unicode: + +Disabling Native Unicode +^^^^^^^^^^^^^^^^^^^^^^^^ SQLAlchemy can also be instructed to skip the usage of the psycopg2 -``UNICODE`` extension and to instead utilize it's own unicode encode/decode +``UNICODE`` extension and to instead utilize its own unicode encode/decode services, which are normally reserved only for those DBAPIs that don't fully support unicode directly. Passing ``use_native_unicode=False`` to :func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``. @@ -111,8 +167,56 @@ using the value of the :func:`.create_engine` ``encoding`` parameter, which defaults to ``utf-8``. SQLAlchemy's own unicode encode/decode functionality is steadily becoming -obsolete as more DBAPIs support unicode fully along with the approach of -Python 3; in modern usage psycopg2 should be relied upon to handle unicode. 
+obsolete as most DBAPIs now support unicode fully. + +Bound Parameter Styles +---------------------- + +The default parameter style for the psycopg2 dialect is "pyformat", where +SQL is rendered using ``%(paramname)s`` style. This format has the limitation +that it does not accommodate the unusual case of parameter names that +actually contain percent or parenthesis symbols; as SQLAlchemy in many cases +generates bound parameter names based on the name of a column, the presence +of these characters in a column name can lead to problems. + +There are two solutions to the issue of a :class:`.schema.Column` that contains +one of these characters in its name. One is to specify the +:paramref:`.schema.Column.key` for columns that have such names:: + + measurement = Table('measurement', metadata, + Column('Size (meters)', Integer, key='size_meters') + ) + +Above, an INSERT statement such as ``measurement.insert()`` will use +``size_meters`` as the parameter name, and a SQL expression such as +``measurement.c.size_meters > 10`` will derive the bound parameter name +from the ``size_meters`` key as well. + +.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key` + as the source of naming when anonymous bound parameters are created + in SQL expressions; previously, this behavior only applied to + :meth:`.Table.insert` and :meth:`.Table.update` parameter names. + +The other solution is to use a positional format; psycopg2 allows use of the +"format" paramstyle, which can be passed to +:paramref:`.create_engine.paramstyle`:: + + engine = create_engine( + 'postgresql://scott:tiger@localhost:5432/test', paramstyle='format') + +With the above engine, instead of a statement like:: + + INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s) + {'Size (meters)': 1} + +we instead see:: + + INSERT INTO measurement ("Size (meters)") VALUES (%s) + (1, ) + +Where above, the dictionary style is converted into a tuple with positional +style. + Transactions ------------ @@ -127,11 +231,12 @@ As discussed in :ref:`postgresql_isolation_level`, all Postgresql dialects support setting of transaction isolation level both via the ``isolation_level`` parameter passed to :func:`.create_engine`, -as well as the ``isolation_level`` argument used by :meth:`.Connection.execution_options`. -When using the psycopg2 dialect, these options make use of -psycopg2's ``set_isolation_level()`` connection method, rather than -emitting a Postgresql directive; this is because psycopg2's API-level -setting is always emitted at the start of each transaction in any case. +as well as the ``isolation_level`` argument used by +:meth:`.Connection.execution_options`. When using the psycopg2 dialect, these +options make use of psycopg2's ``set_isolation_level()`` connection method, +rather than emitting a Postgresql directive; this is because psycopg2's +API-level setting is always emitted at the start of each transaction in any +case. The psycopg2 dialect supports these constants for isolation level: @@ -142,7 +247,13 @@ * ``AUTOCOMMIT`` .. versionadded:: 0.8.2 support for AUTOCOMMIT isolation level when using - psycopg2. + psycopg2. + +.. seealso:: + + :ref:`postgresql_isolation_level` + + :ref:`pg8000_isolation_level` NOTICE logging @@ -159,35 +270,41 @@ HSTORE type ------------ -The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of the -HSTORE type. 
The SQLAlchemy psycopg2 dialect will enable this extension -by default when it is detected that the target database has the HSTORE -type set up for use. In other words, when the dialect makes the first +The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of +the HSTORE type. The SQLAlchemy psycopg2 dialect will enable this extension +by default when psycopg2 version 2.4 or greater is used, and +it is detected that the target database has the HSTORE type set up for use. +In other words, when the dialect makes the first connection, a sequence like the following is performed: -1. Request the available HSTORE oids using ``psycopg2.extras.HstoreAdapter.get_oids()``. - If this function returns a list of HSTORE identifiers, we then determine that - the ``HSTORE`` extension is present. +1. Request the available HSTORE oids using + ``psycopg2.extras.HstoreAdapter.get_oids()``. + If this function returns a list of HSTORE identifiers, we then determine + that the ``HSTORE`` extension is present. + This function is **skipped** if the version of psycopg2 installed is + less than version 2.4. -2. If the ``use_native_hstore`` flag is at it's default of ``True``, and +2. If the ``use_native_hstore`` flag is at its default of ``True``, and we've detected that ``HSTORE`` oids are available, the ``psycopg2.extensions.register_hstore()`` extension is invoked for all connections. -The ``register_hstore()`` extension has the effect of **all Python dictionaries -being accepted as parameters regardless of the type of target column in SQL**. -The dictionaries are converted by this extension into a textual HSTORE expression. -If this behavior is not desired, disable the -use of the hstore extension by setting ``use_native_hstore`` to ``False`` as follows:: +The ``register_hstore()`` extension has the effect of **all Python +dictionaries being accepted as parameters regardless of the type of target +column in SQL**. The dictionaries are converted by this extension into a +textual HSTORE expression. If this behavior is not desired, disable the +use of the hstore extension by setting ``use_native_hstore`` to ``False`` as +follows:: engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test", use_native_hstore=False) -The ``HSTORE`` type is **still supported** when the ``psycopg2.extensions.register_hstore()`` -extension is not used. It merely means that the coercion between Python dictionaries and the HSTORE +The ``HSTORE`` type is **still supported** when the +``psycopg2.extensions.register_hstore()`` extension is not used. It merely +means that the coercion between Python dictionaries and the HSTORE string format, on both the parameter side and the result side, will take -place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2`` which -may be more performant. +place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2`` +which may be more performant. """ from __future__ import absolute_import @@ -202,11 +319,16 @@ from ...sql import expression from ... 
import types as sqltypes from .base import PGDialect, PGCompiler, \ - PGIdentifierPreparer, PGExecutionContext, \ - ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\ - _INT_TYPES + PGIdentifierPreparer, PGExecutionContext, \ + ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\ + _INT_TYPES, UUID from .hstore import HSTORE -from .json import JSON +from .json import JSON, JSONB + +try: + from uuid import UUID as _python_UUID +except ImportError: + _python_UUID = None logger = logging.getLogger('sqlalchemy.dialects.postgresql') @@ -220,14 +342,14 @@ def result_processor(self, dialect, coltype): if self.asdecimal: if coltype in _FLOAT_TYPES: return processors.to_decimal_processor_factory( - decimal.Decimal, - self._effective_decimal_return_scale) + decimal.Decimal, + self._effective_decimal_return_scale) elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES: # pg8000 returns Decimal natively for 1700 return None else: raise exc.InvalidRequestError( - "Unknown PG numeric type: %d" % coltype) + "Unknown PG numeric type: %d" % coltype) else: if coltype in _FLOAT_TYPES: # pg8000 returns float natively for 701 @@ -236,18 +358,19 @@ def result_processor(self, dialect, coltype): return processors.to_float else: raise exc.InvalidRequestError( - "Unknown PG numeric type: %d" % coltype) + "Unknown PG numeric type: %d" % coltype) class _PGEnum(ENUM): def result_processor(self, dialect, coltype): - if util.py2k and self.convert_unicode is True: + if self.native_enum and util.py2k and self.convert_unicode is True: # we can't easily use PG's extensions here because # the OID is on the fly, and we need to give it a python # function anyway - not really worth it. self.convert_unicode = "force_nocheck" return super(_PGEnum, self).result_processor(dialect, coltype) + class _PGHStore(HSTORE): def bind_processor(self, dialect): if dialect._has_native_hstore: @@ -270,7 +393,36 @@ def result_processor(self, dialect, coltype): else: return super(_PGJSON, self).result_processor(dialect, coltype) -# When we're handed literal SQL, ensure it's a SELECT-query. Since + +class _PGJSONB(JSONB): + + def result_processor(self, dialect, coltype): + if dialect._has_native_jsonb: + return None + else: + return super(_PGJSONB, self).result_processor(dialect, coltype) + + +class _PGUUID(UUID): + def bind_processor(self, dialect): + if not self.as_uuid and dialect.use_native_uuid: + nonetype = type(None) + + def process(value): + if value is not None: + value = _python_UUID(value) + return value + return process + + def result_processor(self, dialect, coltype): + if not self.as_uuid and dialect.use_native_uuid: + def process(value): + if value is not None: + value = str(value) + return value + return process + +# When we're handed literal SQL, ensure it's a SELECT query. Since # 8.3, combining cursors and "FOR UPDATE" has been fine. 
SERVER_SIDE_CURSOR_RE = re.compile( r'\s*SELECT', @@ -286,13 +438,16 @@ def create_cursor(self): if self.dialect.server_side_cursors: is_server_side = \ self.execution_options.get('stream_results', True) and ( - (self.compiled and isinstance(self.compiled.statement, expression.Selectable) \ - or \ - ( + (self.compiled and isinstance(self.compiled.statement, + expression.Selectable) + or + ( (not self.compiled or - isinstance(self.compiled.statement, expression.TextClause)) - and self.statement and SERVER_SIDE_CURSOR_RE.match(self.statement)) - ) + isinstance(self.compiled.statement, + expression.TextClause)) + and self.statement and SERVER_SIDE_CURSOR_RE.match( + self.statement)) + ) ) else: is_server_side = \ @@ -302,7 +457,8 @@ def create_cursor(self): if is_server_side: # use server-side cursors: # http://lists.initd.org/pipermail/psycopg/2007-January/005251.html - ident = "c_%s_%s" % (hex(id(self))[2:], hex(_server_side_id())[2:]) + ident = "c_%s_%s" % (hex(id(self))[2:], + hex(_server_side_id())[2:]) return self._dbapi_connection.cursor(ident) else: return self._dbapi_connection.cursor() @@ -329,7 +485,7 @@ def _log_notices(self, cursor): class PGCompiler_psycopg2(PGCompiler): def visit_mod_binary(self, binary, operator, **kw): return self.process(binary.left, **kw) + " %% " + \ - self.process(binary.right, **kw) + self.process(binary.right, **kw) def post_process_text(self, text): return text.replace('%', '%%') @@ -347,14 +503,28 @@ class PGDialect_psycopg2(PGDialect): supports_unicode_statements = False default_paramstyle = 'pyformat' - supports_sane_multi_rowcount = False # set to true based on psycopg2 version + # set to true based on psycopg2 version + supports_sane_multi_rowcount = False execution_ctx_cls = PGExecutionContext_psycopg2 statement_compiler = PGCompiler_psycopg2 preparer = PGIdentifierPreparer_psycopg2 psycopg2_version = (0, 0) + FEATURE_VERSION_MAP = dict( + native_json=(2, 5), + native_jsonb=(2, 5, 4), + sane_multi_rowcount=(2, 0, 9), + array_oid=(2, 4, 3), + hstore_adapter=(2, 4) + ) + _has_native_hstore = False _has_native_json = False + _has_native_jsonb = False + + engine_config_types = PGDialect.engine_config_types.union([ + ('use_native_unicode', util.asbool), + ]) colspecs = util.update_copy( PGDialect.colspecs, @@ -363,47 +533,65 @@ class PGDialect_psycopg2(PGDialect): ENUM: _PGEnum, # needs force_unicode sqltypes.Enum: _PGEnum, # needs force_unicode HSTORE: _PGHStore, - JSON: _PGJSON + JSON: _PGJSON, + JSONB: _PGJSONB, + UUID: _PGUUID } ) def __init__(self, server_side_cursors=False, use_native_unicode=True, - client_encoding=None, - use_native_hstore=True, - **kwargs): + client_encoding=None, + use_native_hstore=True, use_native_uuid=True, + **kwargs): PGDialect.__init__(self, **kwargs) self.server_side_cursors = server_side_cursors self.use_native_unicode = use_native_unicode self.use_native_hstore = use_native_hstore + self.use_native_uuid = use_native_uuid self.supports_unicode_binds = use_native_unicode self.client_encoding = client_encoding if self.dbapi and hasattr(self.dbapi, '__version__'): m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', - self.dbapi.__version__) + self.dbapi.__version__) if m: self.psycopg2_version = tuple( - int(x) - for x in m.group(1, 2, 3) - if x is not None) + int(x) + for x in m.group(1, 2, 3) + if x is not None) def initialize(self, connection): super(PGDialect_psycopg2, self).initialize(connection) self._has_native_hstore = self.use_native_hstore and \ - self._hstore_oids(connection.connection) \ - is not None - 
self._has_native_json = self.psycopg2_version >= (2, 5) + self._hstore_oids(connection.connection) \ + is not None + self._has_native_json = \ + self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_json'] + self._has_native_jsonb = \ + self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_jsonb'] # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9 - self.supports_sane_multi_rowcount = self.psycopg2_version >= (2, 0, 9) + self.supports_sane_multi_rowcount = \ + self.psycopg2_version >= \ + self.FEATURE_VERSION_MAP['sane_multi_rowcount'] @classmethod def dbapi(cls): import psycopg2 return psycopg2 + @classmethod + def _psycopg2_extensions(cls): + from psycopg2 import extensions + return extensions + + @classmethod + def _psycopg2_extras(cls): + from psycopg2 import extras + return extras + @util.memoized_property def _isolation_lookup(self): - from psycopg2 import extensions + extensions = self._psycopg2_extensions() return { 'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT, 'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED, @@ -420,12 +608,13 @@ def set_isolation_level(self, connection, level): "Invalid value '%s' for isolation_level. " "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) - ) + ) connection.set_isolation_level(level) def on_connect(self): - from psycopg2 import extras, extensions + extras = self._psycopg2_extras() + extensions = self._psycopg2_extensions() fns = [] if self.client_encoding is not None: @@ -438,6 +627,11 @@ def on_connect(conn): self.set_isolation_level(conn, self.isolation_level) fns.append(on_connect) + if self.dbapi and self.use_native_uuid: + def on_connect(conn): + extras.register_uuid(None, conn) + fns.append(on_connect) + if self.dbapi and self.use_native_unicode: def on_connect(conn): extensions.register_type(extensions.UNICODE, conn) @@ -449,18 +643,23 @@ def on_connect(conn): hstore_oids = self._hstore_oids(conn) if hstore_oids is not None: oid, array_oid = hstore_oids + kw = {'oid': oid} if util.py2k: - extras.register_hstore(conn, oid=oid, - array_oid=array_oid, - unicode=True) - else: - extras.register_hstore(conn, oid=oid, - array_oid=array_oid) + kw['unicode'] = True + if self.psycopg2_version >= \ + self.FEATURE_VERSION_MAP['array_oid']: + kw['array_oid'] = array_oid + extras.register_hstore(conn, **kw) fns.append(on_connect) if self.dbapi and self._json_deserializer: def on_connect(conn): - extras.register_default_json(conn, loads=self._json_deserializer) + if self._has_native_json: + extras.register_default_json( + conn, loads=self._json_deserializer) + if self._has_native_jsonb: + extras.register_default_jsonb( + conn, loads=self._json_deserializer) fns.append(on_connect) if fns: @@ -473,8 +672,8 @@ def on_connect(conn): @util.memoized_instancemethod def _hstore_oids(self, conn): - if self.psycopg2_version >= (2, 4): - from psycopg2 import extras + if self.psycopg2_version >= self.FEATURE_VERSION_MAP['hstore_adapter']: + extras = self._psycopg2_extras() oids = extras.HstoreAdapter.get_oids(conn) if oids is not None and oids[0]: return oids[0:2] @@ -489,23 +688,36 @@ def create_connect_args(self, url): def is_disconnect(self, e, connection, cursor): if isinstance(e, self.dbapi.Error): + # check the "closed" flag. this might not be + # present on old psycopg2 versions. Also, + # this flag doesn't actually help in a lot of disconnect + # situations, so don't rely on it. + if getattr(connection, 'closed', False): + return True + + # checks based on strings. 
in the case that .closed + # didn't cut it, fall back onto these. str_e = str(e).partition("\n")[0] for msg in [ # these error messages from libpq: interfaces/libpq/fe-misc.c # and interfaces/libpq/fe-secure.c. - # TODO: these are sent through gettext in libpq and we can't - # check within other locales - consider using connection.closed 'terminating connection', 'closed the connection', 'connection not open', 'could not receive data from server', 'could not send data to server', - # psycopg2 client errors, psycopg2/conenction.h, psycopg2/cursor.h + # psycopg2 client errors, psycopg2/conenction.h, + # psycopg2/cursor.h 'connection already closed', 'cursor already closed', # not sure where this path is originally from, it may # be obsolete. It really says "losed", not "closed". - 'losed the connection unexpectedly' + 'losed the connection unexpectedly', + # these can occur in newer SSL + 'connection has been closed unexpectedly', + 'SSL SYSCALL error: Bad file descriptor', + 'SSL SYSCALL error: EOF detected', + 'SSL error: decryption failed or bad record mac', ]: idx = str_e.find(msg) if idx >= 0 and '"' not in str_e[:idx]: diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py new file mode 100644 index 0000000000..ab99a8392f --- /dev/null +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py @@ -0,0 +1,61 @@ +# testing/engines.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +""" +.. dialect:: postgresql+psycopg2cffi + :name: psycopg2cffi + :dbapi: psycopg2cffi + :connectstring: \ +postgresql+psycopg2cffi://user:password@host:port/dbname\ +[?key=value&key=value...] + :url: http://pypi.python.org/pypi/psycopg2cffi/ + +``psycopg2cffi`` is an adaptation of ``psycopg2``, using CFFI for the C +layer. This makes it suitable for use in e.g. PyPy. Documentation +is as per ``psycopg2``. + +.. versionadded:: 1.0.0 + +.. seealso:: + + :mod:`sqlalchemy.dialects.postgresql.psycopg2` + +""" +from .psycopg2 import PGDialect_psycopg2 + + +class PGDialect_psycopg2cffi(PGDialect_psycopg2): + driver = 'psycopg2cffi' + supports_unicode_statements = True + + # psycopg2cffi's first release is 2.5.0, but reports + # __version__ as 2.4.4. Subsequent releases seem to have + # fixed this. 
+ + FEATURE_VERSION_MAP = dict( + native_json=(2, 4, 4), + native_jsonb=(2, 7, 1), + sane_multi_rowcount=(2, 4, 4), + array_oid=(2, 4, 4), + hstore_adapter=(2, 4, 4) + ) + + @classmethod + def dbapi(cls): + return __import__('psycopg2cffi') + + @classmethod + def _psycopg2_extensions(cls): + root = __import__('psycopg2cffi', fromlist=['extensions']) + return root.extensions + + @classmethod + def _psycopg2_extras(cls): + root = __import__('psycopg2cffi', fromlist=['extras']) + return root.extras + + +dialect = PGDialect_psycopg2cffi diff --git a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py index f030d2c1b2..f2b850a9ad 100644 --- a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py +++ b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py @@ -1,5 +1,6 @@ # postgresql/pypostgresql.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,7 +9,8 @@ .. dialect:: postgresql+pypostgresql :name: py-postgresql :dbapi: pypostgresql - :connectstring: postgresql+pypostgresql://user:password@host:port/dbname[?key=value&key=value...] + :connectstring: postgresql+pypostgresql://user:password@host:port/dbname\ +[?key=value&key=value...] :url: http://python.projects.pgfoundry.org/ @@ -63,6 +65,23 @@ def dbapi(cls): from postgresql.driver import dbapi20 return dbapi20 + _DBAPI_ERROR_NAMES = [ + "Error", + "InterfaceError", "DatabaseError", "DataError", + "OperationalError", "IntegrityError", "InternalError", + "ProgrammingError", "NotSupportedError" + ] + + @util.memoized_property + def dbapi_exception_translation_map(self): + if self.dbapi is None: + return {} + + return dict( + (getattr(self.dbapi, name).__name__, name) + for name in self._DBAPI_ERROR_NAMES + ) + def create_connect_args(self, url): opts = url.translate_connect_args(username='user') if 'port' in opts: diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py index 57b0c4c30f..42a1cd4b17 100644 --- a/lib/sqlalchemy/dialects/postgresql/ranges.py +++ b/lib/sqlalchemy/dialects/postgresql/ranges.py @@ -1,4 +1,5 @@ -# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2013-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,6 +9,7 @@ __all__ = ('INT4RANGE', 'INT8RANGE', 'NUMRANGE') + class RangeOperators(object): """ This mixin provides functionality for the Range Operators @@ -93,6 +95,7 @@ def __add__(self, other): """ return self.expr.op('+')(other) + class INT4RANGE(RangeOperators, sqltypes.TypeEngine): """Represent the Postgresql INT4RANGE type. @@ -104,6 +107,7 @@ class INT4RANGE(RangeOperators, sqltypes.TypeEngine): ischema_names['int4range'] = INT4RANGE + class INT8RANGE(RangeOperators, sqltypes.TypeEngine): """Represent the Postgresql INT8RANGE type. @@ -115,6 +119,7 @@ class INT8RANGE(RangeOperators, sqltypes.TypeEngine): ischema_names['int8range'] = INT8RANGE + class NUMRANGE(RangeOperators, sqltypes.TypeEngine): """Represent the Postgresql NUMRANGE type. 
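As a rough sketch of how the range types touched by these hunks are used from the Python side (the ``bookings`` table is hypothetical and the rendered SQL in the comment is approximate; only expression construction is exercised, so no live connection is required)::

    from sqlalchemy import Table, Column, Integer, MetaData, select
    from sqlalchemy.dialects.postgresql import INT4RANGE, TSRANGE

    metadata = MetaData()
    bookings = Table(
        'bookings', metadata,
        Column('id', Integer, primary_key=True),
        Column('seats', INT4RANGE),
        Column('during', TSRANGE),
    )

    # RangeOperators supplies range comparisons such as
    # overlaps (&&), contains (@>) and contained_by (<@)
    stmt = select([bookings.c.id]).where(
        bookings.c.seats.overlaps('[10,20)')
    ).where(
        bookings.c.during.contains('2016-01-01 10:00')
    )

    # roughly: SELECT bookings.id FROM bookings
    #          WHERE bookings.seats && :seats_1 AND bookings.during @> :during_1
    print(stmt)
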
@@ -126,6 +131,7 @@ class NUMRANGE(RangeOperators, sqltypes.TypeEngine): ischema_names['numrange'] = NUMRANGE + class DATERANGE(RangeOperators, sqltypes.TypeEngine): """Represent the Postgresql DATERANGE type. @@ -137,6 +143,7 @@ class DATERANGE(RangeOperators, sqltypes.TypeEngine): ischema_names['daterange'] = DATERANGE + class TSRANGE(RangeOperators, sqltypes.TypeEngine): """Represent the Postgresql TSRANGE type. @@ -148,6 +155,7 @@ class TSRANGE(RangeOperators, sqltypes.TypeEngine): ischema_names['tsrange'] = TSRANGE + class TSTZRANGE(RangeOperators, sqltypes.TypeEngine): """Represent the Postgresql TSTZRANGE type. diff --git a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py index 67e7d53e6e..cc464601b3 100644 --- a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py +++ b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py @@ -1,5 +1,6 @@ # postgresql/zxjdbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py index 80846c9ec7..a8dec300aa 100644 --- a/lib/sqlalchemy/dialects/sqlite/__init__.py +++ b/lib/sqlalchemy/dialects/sqlite/__init__.py @@ -1,10 +1,11 @@ # sqlite/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from sqlalchemy.dialects.sqlite import base, pysqlite +from sqlalchemy.dialects.sqlite import base, pysqlite, pysqlcipher # default dialect base.dialect = pysqlite.dialect diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 90df9c1928..e623ff06cc 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -1,5 +1,6 @@ # sqlite/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,6 +9,7 @@ .. dialect:: sqlite :name: SQLite +.. _sqlite_datetime: Date and Time Types ------------------- @@ -22,72 +24,216 @@ nicely support ordering. There's no reliance on typical "libc" internals for these functions so historical dates are fully supported. -Auto Incrementing Behavior --------------------------- +Ensuring Text affinity +^^^^^^^^^^^^^^^^^^^^^^ + +The DDL rendered for these types is the standard ``DATE``, ``TIME`` +and ``DATETIME`` indicators. However, custom storage formats can also be +applied to these types. When the +storage format is detected as containing no alpha characters, the DDL for +these types is rendered as ``DATE_CHAR``, ``TIME_CHAR``, and ``DATETIME_CHAR``, +so that the column continues to have textual affinity. + +.. seealso:: + + `Type Affinity `_ - in the SQLite documentation + +.. 
_sqlite_autoincrement: + +SQLite Auto Incrementing Behavior +---------------------------------- Background on SQLite's autoincrement is at: http://sqlite.org/autoinc.html -Two things to note: +Key concepts: -* The AUTOINCREMENT keyword is **not** required for SQLite tables to - generate primary key values automatically. AUTOINCREMENT only means that the - algorithm used to generate ROWID values should be slightly different. -* SQLite does **not** generate primary key (i.e. ROWID) values, even for - one column, if the table has a composite (i.e. multi-column) primary key. - This is regardless of the AUTOINCREMENT keyword being present or not. +* SQLite has an implicit "auto increment" feature that takes place for any + non-composite primary-key column that is specifically created using + "INTEGER PRIMARY KEY" for the type + primary key. -To specifically render the AUTOINCREMENT keyword on the primary key column when -rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table +* SQLite also has an explicit "AUTOINCREMENT" keyword, that is **not** + equivalent to the implicit autoincrement feature; this keyword is not + recommended for general use. SQLAlchemy does not render this keyword + unless a special SQLite-specific directive is used (see below). However, + it still requires that the column's type is named "INTEGER". + +Using the AUTOINCREMENT Keyword +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To specifically render the AUTOINCREMENT keyword on the primary key column +when rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table construct:: Table('sometable', metadata, Column('id', Integer, primary_key=True), sqlite_autoincrement=True) -Transaction Isolation Level ---------------------------- +Allowing autoincrement behavior SQLAlchemy types other than Integer/INTEGER +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +SQLite's typing model is based on naming conventions. Among +other things, this means that any type name which contains the +substring ``"INT"`` will be determined to be of "integer affinity". A +type named ``"BIGINT"``, ``"SPECIAL_INT"`` or even ``"XYZINTQPR"``, will be considered by +SQLite to be of "integer" affinity. However, **the SQLite +autoincrement feature, whether implicitly or explicitly enabled, +requires that the name of the column's type +is exactly the string "INTEGER"**. Therefore, if an +application uses a type like :class:`.BigInteger` for a primary key, on +SQLite this type will need to be rendered as the name ``"INTEGER"`` when +emitting the initial ``CREATE TABLE`` statement in order for the autoincrement +behavior to be available. + +One approach to achieve this is to use :class:`.Integer` on SQLite +only using :meth:`.TypeEngine.with_variant`:: + + table = Table( + "my_table", metadata, + Column("id", BigInteger().with_variant(Integer, "sqlite"), primary_key=True) + ) + +Another is to use a subclass of :class:`.BigInteger` that overrides its DDL name +to be ``INTEGER`` when compiled against SQLite:: + + from sqlalchemy import BigInteger + from sqlalchemy.ext.compiler import compiles + + class SLBigInteger(BigInteger): + pass + + @compiles(SLBigInteger, 'sqlite') + def bi_c(element, compiler, **kw): + return "INTEGER" + + @compiles(SLBigInteger) + def bi_c(element, compiler, **kw): + return compiler.visit_BIGINT(element, **kw) + + + table = Table( + "my_table", metadata, + Column("id", SLBigInteger(), primary_key=True) + ) + +.. 
seealso:: + + :meth:`.TypeEngine.with_variant` + + :ref:`sqlalchemy.ext.compiler_toplevel` -:func:`.create_engine` accepts an ``isolation_level`` parameter which results -in the command ``PRAGMA read_uncommitted `` being invoked for every new -connection. Valid values for this parameter are ``SERIALIZABLE`` and ``READ -UNCOMMITTED`` corresponding to a value of 0 and 1, respectively. See the -section :ref:`pysqlite_serializable` for an important workaround when using -serializable isolation with Pysqlite. + `Datatypes In SQLite Version 3 `_ + +.. _sqlite_concurrency: Database Locking Behavior / Concurrency --------------------------------------- -Note that SQLite is not designed for a high level of concurrency. The database -itself, being a file, is locked completely during write operations and within -transactions, meaning exactly one connection has exclusive access to the -database during this period - all other connections will be blocked during this -time. - -The Python DBAPI specification also calls for a connection model that is always -in a transaction; there is no BEGIN method, only commit and rollback. This -implies that a SQLite DBAPI driver would technically allow only serialized -access to a particular database file at all times. The pysqlite driver attempts -to ameliorate this by deferring the actual BEGIN statement until the first DML -(INSERT, UPDATE, or DELETE) is received within a transaction. While this breaks -serializable isolation, it at least delays the exclusive locking inherent in -SQLite's design. - -SQLAlchemy's default mode of usage with the ORM is known as "autocommit=False", -which means the moment the :class:`.Session` begins to be used, a transaction -is begun. As the :class:`.Session` is used, the autoflush feature, also on by -default, will flush out pending changes to the database before each query. The -effect of this is that a :class:`.Session` used in its default mode will often -emit DML early on, long before the transaction is actually committed. This -again will have the effect of serializing access to the SQLite database. If -highly concurrent reads are desired against the SQLite database, it is advised -that the autoflush feature be disabled, and potentially even that autocommit be -re-enabled, which has the effect of each SQL statement and flush committing -changes immediately. - -For more information on SQLite's lack of concurrency by design, please see +SQLite is not designed for a high level of write concurrency. The database +itself, being a file, is locked completely during write operations within +transactions, meaning exactly one "connection" (in reality a file handle) +has exclusive access to the database during this period - all other +"connections" will be blocked during this time. + +The Python DBAPI specification also calls for a connection model that is +always in a transaction; there is no ``connection.begin()`` method, +only ``connection.commit()`` and ``connection.rollback()``, upon which a +new transaction is to be begun immediately. This may seem to imply +that the SQLite driver would in theory allow only a single filehandle on a +particular database file at any time; however, there are several +factors both within SQlite itself as well as within the pysqlite driver +which loosen this restriction significantly. + +However, no matter what locking modes are used, SQLite will still always +lock the database file once a transaction is started and DML (e.g. 
INSERT, +UPDATE, DELETE) has at least been emitted, and this will block +other transactions at least at the point that they also attempt to emit DML. +By default, the length of time on this block is very short before it times out +with an error. + +This behavior becomes more critical when used in conjunction with the +SQLAlchemy ORM. SQLAlchemy's :class:`.Session` object by default runs +within a transaction, and with its autoflush model, may emit DML preceding +any SELECT statement. This may lead to a SQLite database that locks +more quickly than is expected. The locking mode of SQLite and the pysqlite +driver can be manipulated to some degree, however it should be noted that +achieving a high degree of write-concurrency with SQLite is a losing battle. + +For more information on SQLite's lack of write concurrency by design, please +see `Situations Where Another RDBMS May Work Better - High Concurrency `_ near the bottom of the page. +The following subsections introduce areas that are impacted by SQLite's +file-based architecture and additionally will usually require workarounds to +work when using the pysqlite driver. + +.. _sqlite_isolation_level: + +Transaction Isolation Level +---------------------------- + +SQLite supports "transaction isolation" in a non-standard way, along two +axes. One is that of the `PRAGMA read_uncommitted `_ +instruction. This setting can essentially switch SQLite between its +default mode of ``SERIALIZABLE`` isolation, and a "dirty read" isolation +mode normally referred to as ``READ UNCOMMITTED``. + +SQLAlchemy ties into this PRAGMA statement using the +:paramref:`.create_engine.isolation_level` parameter of :func:`.create_engine`. +Valid values for this parameter when used with SQLite are ``"SERIALIZABLE"`` +and ``"READ UNCOMMITTED"`` corresponding to a value of 0 and 1, respectively. +SQLite defaults to ``SERIALIZABLE``, however its behavior is impacted by +the pysqlite driver's default behavior. + +The other axis along which SQLite's transactional locking is impacted is +via the nature of the ``BEGIN`` statement used. The three varieties +are "deferred", "immediate", and "exclusive", as described at +`BEGIN TRANSACTION `_. A straight +``BEGIN`` statement uses the "deferred" mode, where the the database file is +not locked until the first read or write operation, and read access remains +open to other transactions until the first write operation. But again, +it is critical to note that the pysqlite driver interferes with this behavior +by *not even emitting BEGIN* until the first write operation. + +.. warning:: + + SQLite's transactional scope is impacted by unresolved + issues in the pysqlite driver, which defers BEGIN statements to a greater + degree than is often feasible. See the section :ref:`pysqlite_serializable` + for techniques to work around this behavior. + +SAVEPOINT Support +---------------------------- + +SQLite supports SAVEPOINTs, which only function once a transaction is +begun. SQLAlchemy's SAVEPOINT support is available using the +:meth:`.Connection.begin_nested` method at the Core level, and +:meth:`.Session.begin_nested` at the ORM level. However, SAVEPOINTs +won't work at all with pysqlite unless workarounds are taken. + +.. warning:: + + SQLite's SAVEPOINT feature is impacted by unresolved + issues in the pysqlite driver, which defers BEGIN statements to a greater + degree than is often feasible. See the section :ref:`pysqlite_serializable` + for techniques to work around this behavior. 
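A minimal Core-level sketch of the SAVEPOINT API referenced above, assuming a driver configuration in which SAVEPOINT actually functions (with the default pysqlite setup, the workarounds described in :ref:`pysqlite_serializable` should be applied first)::

    from sqlalchemy import create_engine

    engine = create_engine("sqlite://")
    conn = engine.connect()
    conn.execute("create table t (x integer)")

    trans = conn.begin()                  # outer transaction
    conn.execute("insert into t (x) values (1)")

    savepoint = conn.begin_nested()       # emits SAVEPOINT
    conn.execute("insert into t (x) values (2)")
    savepoint.rollback()                  # ROLLBACK TO SAVEPOINT; outer work is kept

    trans.commit()

    print(conn.execute("select x from t").fetchall())   # typically [(1,)]
    conn.close()

The ORM mirrors this pattern via :meth:`.Session.begin_nested`, subject to the same pysqlite caveats.
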
+ +Transactional DDL +---------------------------- + +The SQLite database supports transactional :term:`DDL` as well. +In this case, the pysqlite driver is not only failing to start transactions, +it also is ending any existing transction when DDL is detected, so again, +workarounds are required. + +.. warning:: + + SQLite's transactional DDL is impacted by unresolved issues + in the pysqlite driver, which fails to emit BEGIN and additionally + forces a COMMIT to cancel any transaction when DDL is encountered. + See the section :ref:`pysqlite_serializable` + for techniques to work around this behavior. + .. _sqlite_foreign_keys: Foreign Key Support @@ -100,10 +246,10 @@ Constraint checking on SQLite has three prerequisites: * At least version 3.6.19 of SQLite must be in use -* The SQLite libary must be compiled *without* the SQLITE_OMIT_FOREIGN_KEY +* The SQLite library must be compiled *without* the SQLITE_OMIT_FOREIGN_KEY or SQLITE_OMIT_TRIGGER symbols enabled. -* The ``PRAGMA foreign_keys = ON`` statement must be emitted on all connections - before use. +* The ``PRAGMA foreign_keys = ON`` statement must be emitted on all + connections before use. SQLAlchemy allows for the ``PRAGMA`` statement to be emitted automatically for new connections through the usage of events:: @@ -117,13 +263,25 @@ def set_sqlite_pragma(dbapi_connection, connection_record): cursor.execute("PRAGMA foreign_keys=ON") cursor.close() +.. warning:: + + When SQLite foreign keys are enabled, it is **not possible** + to emit CREATE or DROP statements for tables that contain + mutually-dependent foreign key constraints; + to emit the DDL for these tables requires that ALTER TABLE be used to + create or drop these constraints separately, for which SQLite has + no support. + .. seealso:: - `SQLite Foreign Key Support `_ - on - the SQLite web site. + `SQLite Foreign Key Support `_ + - on the SQLite web site. :ref:`event_toplevel` - SQLAlchemy event API. + :ref:`use_alter` - more information on SQLAlchemy's facilities for handling + mutually-dependent foreign key constraints. + .. _sqlite_type_reflection: Type Reflection @@ -174,6 +332,133 @@ def set_sqlite_pragma(dbapi_connection, connection_record): .. versionadded:: 0.9.3 Support for SQLite type affinity rules when reflecting columns. + +.. _sqlite_partial_index: + +Partial Indexes +--------------- + +A partial index, e.g. one which uses a WHERE clause, can be specified +with the DDL system using the argument ``sqlite_where``:: + + tbl = Table('testtbl', m, Column('data', Integer)) + idx = Index('test_idx1', tbl.c.data, + sqlite_where=and_(tbl.c.data > 5, tbl.c.data < 10)) + +The index will be rendered at create time as:: + + CREATE INDEX test_idx1 ON testtbl (data) + WHERE data > 5 AND data < 10 + +.. versionadded:: 0.9.9 + +Dotted Column Names +------------------- + +Using table or column names that explicitly have periods in them is +**not recommended**. While this is generally a bad idea for relational +databases in general, as the dot is a syntactically significant character, +the SQLite driver up until version **3.10.0** of SQLite has a bug which +requires that SQLAlchemy filter out these dots in result sets. + +.. note:: + + The following SQLite issue has been resolved as of version 3.10.0 + of SQLite. SQLAlchemy as of **1.1** automatically disables its internal + workarounds based on detection of this version. 
+ +The bug, entirely outside of SQLAlchemy, can be illustrated thusly:: + + import sqlite3 + + conn = sqlite3.connect(":memory:") + cursor = conn.cursor() + + cursor.execute("create table x (a integer, b integer)") + cursor.execute("insert into x (a, b) values (1, 1)") + cursor.execute("insert into x (a, b) values (2, 2)") + + cursor.execute("select x.a, x.b from x") + assert [c[0] for c in cursor.description] == ['a', 'b'] + + cursor.execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert [c[0] for c in cursor.description] == ['a', 'b'], \\ + [c[0] for c in cursor.description] + +The second assertion fails:: + + Traceback (most recent call last): + File "test.py", line 19, in + [c[0] for c in cursor.description] + AssertionError: ['x.a', 'x.b'] + +Where above, the driver incorrectly reports the names of the columns +including the name of the table, which is entirely inconsistent vs. +when the UNION is not present. + +SQLAlchemy relies upon column names being predictable in how they match +to the original statement, so the SQLAlchemy dialect has no choice but +to filter these out:: + + + from sqlalchemy import create_engine + + eng = create_engine("sqlite://") + conn = eng.connect() + + conn.execute("create table x (a integer, b integer)") + conn.execute("insert into x (a, b) values (1, 1)") + conn.execute("insert into x (a, b) values (2, 2)") + + result = conn.execute("select x.a, x.b from x") + assert result.keys() == ["a", "b"] + + result = conn.execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert result.keys() == ["a", "b"] + +Note that above, even though SQLAlchemy filters out the dots, *both +names are still addressable*:: + + >>> row = result.first() + >>> row["a"] + 1 + >>> row["x.a"] + 1 + >>> row["b"] + 1 + >>> row["x.b"] + 1 + +Therefore, the workaround applied by SQLAlchemy only impacts +:meth:`.ResultProxy.keys` and :meth:`.RowProxy.keys()` in the public API. +In the very specific case where +an application is forced to use column names that contain dots, and the +functionality of :meth:`.ResultProxy.keys` and :meth:`.RowProxy.keys()` +is required to return these dotted names unmodified, the ``sqlite_raw_colnames`` +execution option may be provided, either on a per-:class:`.Connection` basis:: + + result = conn.execution_options(sqlite_raw_colnames=True).execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert result.keys() == ["x.a", "x.b"] + +or on a per-:class:`.Engine` basis:: + + engine = create_engine("sqlite://", execution_options={"sqlite_raw_colnames": True}) + +When using the per-:class:`.Engine` execution option, note that +**Core and ORM queries that use UNION may not function properly**. + """ import datetime @@ -186,8 +471,9 @@ def set_sqlite_pragma(dbapi_connection, connection_record): from ...engine import default, reflection from ...sql import compiler -from ...types import (BLOB, BOOLEAN, CHAR, DATE, DECIMAL, FLOAT, INTEGER, REAL, - NUMERIC, SMALLINT, TEXT, TIMESTAMP, VARCHAR) +from ...types import (BLOB, BOOLEAN, CHAR, DECIMAL, FLOAT, + INTEGER, REAL, NUMERIC, SMALLINT, TEXT, + TIMESTAMP, VARCHAR) class _DateTimeMixin(object): @@ -201,6 +487,25 @@ def __init__(self, storage_format=None, regexp=None, **kw): if storage_format is not None: self._storage_format = storage_format + @property + def format_is_text_affinity(self): + """return True if the storage format will automatically imply + a TEXT affinity. 
+ + If the storage format contains no non-numeric characters, + it will imply a NUMERIC storage format on SQLite; in this case, + the type will generate its DDL as DATE_CHAR, DATETIME_CHAR, + TIME_CHAR. + + .. versionadded:: 1.0.0 + + """ + spec = self._storage_format % { + "year": 0, "month": 0, "day": 0, "hour": 0, + "minute": 0, "second": 0, "microsecond": 0 + } + return bool(re.search(r'[^0-9]', spec)) + def adapt(self, cls, **kw): if issubclass(cls, _DateTimeMixin): if self._storage_format: @@ -211,6 +516,7 @@ def adapt(self, cls, **kw): def literal_processor(self, dialect): bp = self.bind_processor(dialect) + def process(value): return "'%s'" % bp(value) return process @@ -221,7 +527,8 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime): The default string storage format is:: - "%(year)04d-%(month)02d-%(day)02d %(hour)02d:%(min)02d:%(second)02d.%(microsecond)06d" + "%(year)04d-%(month)02d-%(day)02d %(hour)02d:%(min)02d:\ +%(second)02d.%(microsecond)06d" e.g.:: @@ -234,17 +541,18 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime): from sqlalchemy.dialects.sqlite import DATETIME dt = DATETIME( - storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:%(min)02d:%(second)02d", + storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:\ +%(min)02d:%(second)02d", regexp=r"(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)" ) - :param storage_format: format string which will be applied to the dict with - keys year, month, day, hour, minute, second, and microsecond. + :param storage_format: format string which will be applied to the dict + with keys year, month, day, hour, minute, second, and microsecond. :param regexp: regular expression which will be applied to incoming result rows. If the regexp contains named groups, the resulting match dict is applied to the Python datetime() constructor as keyword arguments. - Otherwise, if positional groups are used, the the datetime() constructor + Otherwise, if positional groups are used, the datetime() constructor is called with positional arguments via ``*map(int, match_obj.groups(0))``. """ @@ -337,7 +645,7 @@ class DATE(_DateTimeMixin, sqltypes.Date): incoming result rows. If the regexp contains named groups, the resulting match dict is applied to the Python date() constructor as keyword arguments. Otherwise, if positional groups are used, the - the date() constructor is called with positional arguments via + date() constructor is called with positional arguments via ``*map(int, match_obj.groups(0))``. """ @@ -387,17 +695,18 @@ class TIME(_DateTimeMixin, sqltypes.Time): from sqlalchemy.dialects.sqlite import TIME t = TIME( - storage_format="%(hour)02d-%(minute)02d-%(second)02d-%(microsecond)06d", + storage_format="%(hour)02d-%(minute)02d-%(second)02d-\ +%(microsecond)06d", regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?") ) - :param storage_format: format string which will be applied to the dict with - keys hour, minute, second, and microsecond. + :param storage_format: format string which will be applied to the dict + with keys hour, minute, second, and microsecond. :param regexp: regular expression which will be applied to incoming result rows. If the regexp contains named groups, the resulting match dict is applied to the Python time() constructor as keyword arguments. Otherwise, - if positional groups are used, the the time() constructor is called with + if positional groups are used, the time() constructor is called with positional arguments via ``*map(int, match_obj.groups(0))``. 
""" @@ -452,7 +761,9 @@ def result_processor(self, dialect, coltype): 'BOOLEAN': sqltypes.BOOLEAN, 'CHAR': sqltypes.CHAR, 'DATE': sqltypes.DATE, + 'DATE_CHAR': sqltypes.DATE, 'DATETIME': sqltypes.DATETIME, + 'DATETIME_CHAR': sqltypes.DATETIME, 'DOUBLE': sqltypes.FLOAT, 'DECIMAL': sqltypes.DECIMAL, 'FLOAT': sqltypes.FLOAT, @@ -463,6 +774,7 @@ def result_processor(self, dialect, coltype): 'SMALLINT': sqltypes.SMALLINT, 'TEXT': sqltypes.TEXT, 'TIME': sqltypes.TIME, + 'TIME_CHAR': sqltypes.TIME, 'TIMESTAMP': sqltypes.TIMESTAMP, 'VARCHAR': sqltypes.VARCHAR, 'NVARCHAR': sqltypes.NVARCHAR, @@ -517,19 +829,19 @@ def visit_extract(self, extract, **kw): raise exc.CompileError( "%s is not a valid extract argument." % extract.field) - def limit_clause(self, select): + def limit_clause(self, select, **kw): text = "" - if select._limit is not None: - text += "\n LIMIT " + self.process(sql.literal(select._limit)) - if select._offset is not None: - if select._limit is None: + if select._limit_clause is not None: + text += "\n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: text += "\n LIMIT " + self.process(sql.literal(-1)) - text += " OFFSET " + self.process(sql.literal(select._offset)) + text += " OFFSET " + self.process(select._offset_clause, **kw) else: - text += " OFFSET " + self.process(sql.literal(0)) + text += " OFFSET " + self.process(sql.literal(0), **kw) return text - def for_update_clause(self, select): + def for_update_clause(self, select, **kw): # sqlite has no "FOR UPDATE" AFAICT return '' @@ -537,7 +849,8 @@ def for_update_clause(self, select): class SQLiteDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): - coltype = self.dialect.type_compiler.process(column.type) + coltype = self.dialect.type_compiler.process( + column.type, type_expression=column) colspec = self.preparer.format_column(column) + " " + coltype default = self.get_column_default_string(column) if default is not None: @@ -572,14 +885,15 @@ def visit_primary_key_constraint(self, constraint): def visit_foreign_key_constraint(self, constraint): - local_table = list(constraint._elements.values())[0].parent.table - remote_table = list(constraint._elements.values())[0].column.table + local_table = constraint.elements[0].parent.table + remote_table = constraint.elements[0].column.table if local_table.schema != remote_table.schema: return None else: - return super(SQLiteDDLCompiler, self).visit_foreign_key_constraint( - constraint) + return super( + SQLiteDDLCompiler, + self).visit_foreign_key_constraint(constraint) def define_constraint_remote_table(self, constraint, table, preparer): """Format the remote table clause of a CREATE CONSTRAINT clause.""" @@ -587,14 +901,46 @@ def define_constraint_remote_table(self, constraint, table, preparer): return preparer.format_table(table, use_schema=False) def visit_create_index(self, create): - return super(SQLiteDDLCompiler, self).visit_create_index( + index = create.element + + text = super(SQLiteDDLCompiler, self).visit_create_index( create, include_table_schema=False) + whereclause = index.dialect_options["sqlite"]["where"] + if whereclause is not None: + where_compiled = self.sql_compiler.process( + whereclause, include_table=False, + literal_binds=True) + text += " WHERE " + where_compiled + + return text + class SQLiteTypeCompiler(compiler.GenericTypeCompiler): - def visit_large_binary(self, type_): + def visit_large_binary(self, type_, **kw): return 
self.visit_BLOB(type_) + def visit_DATETIME(self, type_, **kw): + if not isinstance(type_, _DateTimeMixin) or \ + type_.format_is_text_affinity: + return super(SQLiteTypeCompiler, self).visit_DATETIME(type_) + else: + return "DATETIME_CHAR" + + def visit_DATE(self, type_, **kw): + if not isinstance(type_, _DateTimeMixin) or \ + type_.format_is_text_affinity: + return super(SQLiteTypeCompiler, self).visit_DATE(type_) + else: + return "DATE_CHAR" + + def visit_TIME(self, type_, **kw): + if not isinstance(type_, _DateTimeMixin) or \ + type_.format_is_text_affinity: + return super(SQLiteTypeCompiler, self).visit_TIME(type_) + else: + return "TIME_CHAR" + class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = set([ @@ -616,7 +962,7 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): 'temporary', 'then', 'to', 'transaction', 'trigger', 'true', 'union', 'unique', 'update', 'using', 'vacuum', 'values', 'view', 'virtual', 'when', 'where', - ]) + ]) def format_index(self, index, use_schema=True, name=None): """Prepare a quoted index and schema name.""" @@ -627,8 +973,8 @@ def format_index(self, index, use_schema=True, name=None): if (not self.omit_schema and use_schema and getattr(index.table, "schema", None)): - result = self.quote_schema(index.table.schema, - index.table.quote_schema) + "." + result + result = self.quote_schema( + index.table.schema, index.table.quote_schema) + "." + result return result @@ -638,10 +984,15 @@ def _preserve_raw_colnames(self): return self.execution_options.get("sqlite_raw_colnames", False) def _translate_colname(self, colname): - # adjust for dotted column names. SQLite in the case of UNION may store - # col names as "tablename.colname" in cursor.description + # TODO: detect SQLite version 3.10.0 or greater; + # see [ticket:3633] + + # adjust for dotted column names. SQLite + # in the case of UNION may store col names as + # "tablename.colname", or if using an attached database, + # "database.tablename.colname", in cursor.description if not self._preserve_raw_colnames and "." in colname: - return colname.split(".")[1], colname + return colname.split(".")[-1], colname else: return colname, None @@ -655,6 +1006,9 @@ class SQLiteDialect(default.DefaultDialect): supports_empty_insert = False supports_cast = True supports_multivalues_insert = True + + # TODO: detect version 3.7.16 or greater; + # see [ticket:3634] supports_right_nested_joins = False default_paramstyle = 'qmark' @@ -673,7 +1027,10 @@ class SQLiteDialect(default.DefaultDialect): construct_arguments = [ (sa_schema.Table, { "autoincrement": False - }) + }), + (sa_schema.Index, { + "where": None, + }), ] _broken_fk_pragma_quotes = False @@ -682,9 +1039,10 @@ def __init__(self, isolation_level=None, native_datetime=False, **kwargs): default.DefaultDialect.__init__(self, **kwargs) self.isolation_level = isolation_level - # this flag used by pysqlite dialect, and perhaps others in the future, - # to indicate the driver is handling date/timestamp conversions (and - # perhaps datetime/time as well on some hypothetical driver ?) + # this flag used by pysqlite dialect, and perhaps others in the + # future, to indicate the driver is handling date/timestamp + # conversions (and perhaps datetime/time as well on some hypothetical + # driver ?) self.native_datetime = native_datetime if self.dbapi is not None: @@ -713,7 +1071,7 @@ def set_isolation_level(self, connection, level): "Invalid value '%s' for isolation_level. 
" "Valid isolation levels for %s are %s" % (level, self.name, ", ".join(self._isolation_lookup)) - ) + ) cursor = connection.cursor() cursor.execute("PRAGMA read_uncommitted = %d" % isolation_level) cursor.close() @@ -753,60 +1111,44 @@ def get_table_names(self, connection, schema=None, **kw): if schema is not None: qschema = self.identifier_preparer.quote_identifier(schema) master = '%s.sqlite_master' % qschema - s = ("SELECT name FROM %s " - "WHERE type='table' ORDER BY name") % (master,) - rs = connection.execute(s) else: - try: - s = ("SELECT name FROM " - " (SELECT * FROM sqlite_master UNION ALL " - " SELECT * FROM sqlite_temp_master) " - "WHERE type='table' ORDER BY name") - rs = connection.execute(s) - except exc.DBAPIError: - s = ("SELECT name FROM sqlite_master " - "WHERE type='table' ORDER BY name") - rs = connection.execute(s) + master = "sqlite_master" + s = ("SELECT name FROM %s " + "WHERE type='table' ORDER BY name") % (master,) + rs = connection.execute(s) + return [row[0] for row in rs] + + @reflection.cache + def get_temp_table_names(self, connection, **kw): + s = "SELECT name FROM sqlite_temp_master "\ + "WHERE type='table' ORDER BY name " + rs = connection.execute(s) return [row[0] for row in rs] - def has_table(self, connection, table_name, schema=None): - quote = self.identifier_preparer.quote_identifier - if schema is not None: - pragma = "PRAGMA %s." % quote(schema) - else: - pragma = "PRAGMA " - qtable = quote(table_name) - statement = "%stable_info(%s)" % (pragma, qtable) - cursor = _pragma_cursor(connection.execute(statement)) - row = cursor.fetchone() + @reflection.cache + def get_temp_view_names(self, connection, **kw): + s = "SELECT name FROM sqlite_temp_master "\ + "WHERE type='view' ORDER BY name " + rs = connection.execute(s) - # consume remaining rows, to work around - # http://www.sqlite.org/cvstrac/tktview?tn=1884 - while not cursor.closed and cursor.fetchone() is not None: - pass + return [row[0] for row in rs] - return row is not None + def has_table(self, connection, table_name, schema=None): + info = self._get_table_pragma( + connection, "table_info", table_name, schema=schema) + return bool(info) @reflection.cache def get_view_names(self, connection, schema=None, **kw): if schema is not None: qschema = self.identifier_preparer.quote_identifier(schema) master = '%s.sqlite_master' % qschema - s = ("SELECT name FROM %s " - "WHERE type='view' ORDER BY name") % (master,) - rs = connection.execute(s) else: - try: - s = ("SELECT name FROM " - " (SELECT * FROM sqlite_master UNION ALL " - " SELECT * FROM sqlite_temp_master) " - "WHERE type='view' ORDER BY name") - rs = connection.execute(s) - except exc.DBAPIError: - s = ("SELECT name FROM sqlite_master " - "WHERE type='view' ORDER BY name") - rs = connection.execute(s) + master = "sqlite_master" + s = ("SELECT name FROM %s " + "WHERE type='view' ORDER BY name") % (master,) + rs = connection.execute(s) return [row[0] for row in rs] @@ -837,18 +1179,11 @@ def get_view_definition(self, connection, view_name, schema=None, **kw): @reflection.cache def get_columns(self, connection, table_name, schema=None, **kw): - quote = self.identifier_preparer.quote_identifier - if schema is not None: - pragma = "PRAGMA %s." 
% quote(schema) - else: - pragma = "PRAGMA " - qtable = quote(table_name) - statement = "%stable_info(%s)" % (pragma, qtable) - c = _pragma_cursor(connection.execute(statement)) + info = self._get_table_pragma( + connection, "table_info", table_name, schema=schema) - rows = c.fetchall() columns = [] - for row in rows: + for row in info: (name, type_, nullable, default, primary_key) = ( row[1], row[2].upper(), not row[3], row[4], row[5]) @@ -874,7 +1209,7 @@ def _get_column_info(self, name, type_, nullable, default, primary_key): def _resolve_type_affinity(self, type_): """Return a data type from a reflected column, using affinity tules. - SQLite's goal for universal compatability introduces some complexity + SQLite's goal for universal compatibility introduces some complexity during reflection, as a column's defined type might not actually be a type that SQLite understands - or indeed, my not be defined *at all*. Internally, SQLite handles this with a 'data type affinity' for each @@ -915,9 +1250,9 @@ def _resolve_type_affinity(self, type_): coltype = coltype(*[int(a) for a in args]) except TypeError: util.warn( - "Could not instantiate type %s with " - "reflected arguments %s; using no arguments." % - (coltype, args)) + "Could not instantiate type %s with " + "reflected arguments %s; using no arguments." % + (coltype, args)) coltype = coltype() else: coltype = coltype() @@ -935,115 +1270,219 @@ def get_pk_constraint(self, connection, table_name, schema=None, **kw): @reflection.cache def get_foreign_keys(self, connection, table_name, schema=None, **kw): - quote = self.identifier_preparer.quote_identifier - if schema is not None: - pragma = "PRAGMA %s." % quote(schema) - else: - pragma = "PRAGMA " - qtable = quote(table_name) - statement = "%sforeign_key_list(%s)" % (pragma, qtable) - c = _pragma_cursor(connection.execute(statement)) - fkeys = [] + # sqlite makes this *extremely difficult*. + # First, use the pragma to get the actual FKs. 
+ pragma_fks = self._get_table_pragma( + connection, "foreign_key_list", + table_name, schema=schema + ) + fks = {} - while True: - row = c.fetchone() - if row is None: - break - (numerical_id, rtbl, lcol, rcol) = (row[0], row[2], row[3], row[4]) - self._parse_fk(fks, fkeys, numerical_id, rtbl, lcol, rcol) - return fkeys + for row in pragma_fks: + (numerical_id, rtbl, lcol, rcol) = ( + row[0], row[2], row[3], row[4]) - def _parse_fk(self, fks, fkeys, numerical_id, rtbl, lcol, rcol): - # sqlite won't return rcol if the table was created with REFERENCES - # , no col - if rcol is None: - rcol = lcol + if rcol is None: + rcol = lcol - if self._broken_fk_pragma_quotes: - rtbl = re.sub(r'^[\"\[`\']|[\"\]`\']$', '', rtbl) + if self._broken_fk_pragma_quotes: + rtbl = re.sub(r'^[\"\[`\']|[\"\]`\']$', '', rtbl) + + if numerical_id in fks: + fk = fks[numerical_id] + else: + fk = fks[numerical_id] = { + 'name': None, + 'constrained_columns': [], + 'referred_schema': None, + 'referred_table': rtbl, + 'referred_columns': [], + } + fks[numerical_id] = fk - try: - fk = fks[numerical_id] - except KeyError: - fk = { - 'name': None, - 'constrained_columns': [], - 'referred_schema': None, - 'referred_table': rtbl, - 'referred_columns': [], - } - fkeys.append(fk) - fks[numerical_id] = fk - - if lcol not in fk['constrained_columns']: fk['constrained_columns'].append(lcol) - if rcol not in fk['referred_columns']: fk['referred_columns'].append(rcol) - return fk + + def fk_sig(constrained_columns, referred_table, referred_columns): + return tuple(constrained_columns) + (referred_table,) + \ + tuple(referred_columns) + + # then, parse the actual SQL and attempt to find DDL that matches + # the names as well. SQLite saves the DDL in whatever format + # it was typed in as, so need to be liberal here. + + keys_by_signature = dict( + ( + fk_sig( + fk['constrained_columns'], + fk['referred_table'], fk['referred_columns']), + fk + ) for fk in fks.values() + ) + + table_data = self._get_table_sql(connection, table_name, schema=schema) + if table_data is None: + # system tables, etc. + return [] + + def parse_fks(): + FK_PATTERN = ( + '(?:CONSTRAINT (\w+) +)?' + 'FOREIGN KEY *\( *(.+?) 
*\) +' + 'REFERENCES +(?:(?:"(.+?)")|([a-z0-9_]+)) *\((.+?)\)' + ) + + for match in re.finditer(FK_PATTERN, table_data, re.I): + ( + constraint_name, constrained_columns, + referred_quoted_name, referred_name, + referred_columns) = match.group(1, 2, 3, 4, 5) + constrained_columns = list( + self._find_cols_in_sig(constrained_columns)) + if not referred_columns: + referred_columns = constrained_columns + else: + referred_columns = list( + self._find_cols_in_sig(referred_columns)) + referred_name = referred_quoted_name or referred_name + yield ( + constraint_name, constrained_columns, + referred_name, referred_columns) + fkeys = [] + + for ( + constraint_name, constrained_columns, + referred_name, referred_columns) in parse_fks(): + sig = fk_sig( + constrained_columns, referred_name, referred_columns) + if sig not in keys_by_signature: + util.warn( + "WARNING: SQL-parsed foreign key constraint " + "'%s' could not be located in PRAGMA " + "foreign_keys for table %s" % ( + sig, + table_name + )) + continue + key = keys_by_signature.pop(sig) + key['name'] = constraint_name + fkeys.append(key) + # assume the remainders are the unnamed, inline constraints, just + # use them as is as it's extremely difficult to parse inline + # constraints + fkeys.extend(keys_by_signature.values()) + return fkeys + + def _find_cols_in_sig(self, sig): + for match in re.finditer(r'(?:"(.+?)")|([a-z0-9_]+)', sig, re.I): + yield match.group(1) or match.group(2) + + @reflection.cache + def get_unique_constraints(self, connection, table_name, + schema=None, **kw): + + auto_index_by_sig = {} + for idx in self.get_indexes( + connection, table_name, schema=schema, + include_auto_indexes=True, **kw): + if not idx['name'].startswith("sqlite_autoindex"): + continue + sig = tuple(idx['column_names']) + auto_index_by_sig[sig] = idx + + table_data = self._get_table_sql( + connection, table_name, schema=schema, **kw) + if not table_data: + return [] + + unique_constraints = [] + + def parse_uqs(): + UNIQUE_PATTERN = '(?:CONSTRAINT "?(.+?)"? +)?UNIQUE *\((.+?)\)' + INLINE_UNIQUE_PATTERN = ( + '(?:(".+?")|([a-z0-9]+)) ' + '+[a-z0-9_ ]+? +UNIQUE') + + for match in re.finditer(UNIQUE_PATTERN, table_data, re.I): + name, cols = match.group(1, 2) + yield name, list(self._find_cols_in_sig(cols)) + + # we need to match inlines as well, as we seek to differentiate + # a UNIQUE constraint from a UNIQUE INDEX, even though these + # are kind of the same thing :) + for match in re.finditer(INLINE_UNIQUE_PATTERN, table_data, re.I): + cols = list( + self._find_cols_in_sig(match.group(1) or match.group(2))) + yield None, cols + + for name, cols in parse_uqs(): + sig = tuple(cols) + if sig in auto_index_by_sig: + auto_index_by_sig.pop(sig) + parsed_constraint = { + 'name': name, + 'column_names': cols + } + unique_constraints.append(parsed_constraint) + # NOTE: auto_index_by_sig might not be empty here, + # the PRIMARY KEY may have an entry. + return unique_constraints @reflection.cache def get_indexes(self, connection, table_name, schema=None, **kw): - quote = self.identifier_preparer.quote_identifier - if schema is not None: - pragma = "PRAGMA %s." 
% quote(schema) - else: - pragma = "PRAGMA " - include_auto_indexes = kw.pop('include_auto_indexes', False) - qtable = quote(table_name) - statement = "%sindex_list(%s)" % (pragma, qtable) - c = _pragma_cursor(connection.execute(statement)) + pragma_indexes = self._get_table_pragma( + connection, "index_list", table_name, schema=schema) indexes = [] - while True: - row = c.fetchone() - if row is None: - break + + include_auto_indexes = kw.pop('include_auto_indexes', False) + for row in pragma_indexes: # ignore implicit primary key index. # http://www.mail-archive.com/sqlite-users@sqlite.org/msg30517.html - elif (not include_auto_indexes and - row[1].startswith('sqlite_autoindex')): + if (not include_auto_indexes and + row[1].startswith('sqlite_autoindex')): continue indexes.append(dict(name=row[1], column_names=[], unique=row[2])) + # loop thru unique indexes to get the column names. for idx in indexes: - statement = "%sindex_info(%s)" % (pragma, quote(idx['name'])) - c = connection.execute(statement) - cols = idx['column_names'] - while True: - row = c.fetchone() - if row is None: - break - cols.append(row[2]) + pragma_index = self._get_table_pragma( + connection, "index_info", idx['name']) + + for row in pragma_index: + idx['column_names'].append(row[2]) return indexes @reflection.cache - def get_unique_constraints(self, connection, table_name, - schema=None, **kw): - UNIQUE_SQL = """ - SELECT sql - FROM - sqlite_master - WHERE - type='table' AND - name=:table_name - """ - c = connection.execute(UNIQUE_SQL, table_name=table_name) - table_data = c.fetchone()[0] - - UNIQUE_PATTERN = 'CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)' - return [ - {'name': name, - 'column_names': [col.strip(' "') for col in cols.split(',')]} - for name, cols in re.findall(UNIQUE_PATTERN, table_data) - ] - - -def _pragma_cursor(cursor): - """work around SQLite issue whereby cursor.description - is blank when PRAGMA returns no rows.""" + def _get_table_sql(self, connection, table_name, schema=None, **kw): + try: + s = ("SELECT sql FROM " + " (SELECT * FROM sqlite_master UNION ALL " + " SELECT * FROM sqlite_temp_master) " + "WHERE name = '%s' " + "AND type = 'table'") % table_name + rs = connection.execute(s) + except exc.DBAPIError: + s = ("SELECT sql FROM sqlite_master WHERE name = '%s' " + "AND type = 'table'") % table_name + rs = connection.execute(s) + return rs.scalar() - if cursor.closed: - cursor.fetchone = lambda: None - cursor.fetchall = lambda: [] - return cursor + def _get_table_pragma(self, connection, pragma, table_name, schema=None): + quote = self.identifier_preparer.quote_identifier + if schema is not None: + statement = "PRAGMA %s." 
% quote(schema) + else: + statement = "PRAGMA " + qtable = quote(table_name) + statement = "%s%s(%s)" % (statement, pragma, qtable) + cursor = connection.execute(statement) + if not cursor._soft_closed: + # work around SQLite issue whereby cursor.description + # is blank when PRAGMA returns no rows: + # http://www.sqlite.org/cvstrac/tktview?tn=1884 + result = cursor.fetchall() + else: + result = [] + return result diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py new file mode 100644 index 0000000000..bbafc8d60c --- /dev/null +++ b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py @@ -0,0 +1,116 @@ +# sqlite/pysqlcipher.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. dialect:: sqlite+pysqlcipher + :name: pysqlcipher + :dbapi: pysqlcipher + :connectstring: sqlite+pysqlcipher://:passphrase/file_path[?kdf_iter=] + :url: https://pypi.python.org/pypi/pysqlcipher + + ``pysqlcipher`` is a fork of the standard ``pysqlite`` driver to make + use of the `SQLCipher `_ backend. + + .. versionadded:: 0.9.9 + +Driver +------ + +The driver here is the `pysqlcipher `_ +driver, which makes use of the SQLCipher engine. This system essentially +introduces new PRAGMA commands to SQLite which allows the setting of a +passphrase and other encryption parameters, allowing the database +file to be encrypted. + +Connect Strings +--------------- + +The format of the connect string is in every way the same as that +of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the +"password" field is now accepted, which should contain a passphrase:: + + e = create_engine('sqlite+pysqlcipher://:testing@/foo.db') + +For an absolute file path, two leading slashes should be used for the +database name:: + + e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db') + +A selection of additional encryption-related pragmas supported by SQLCipher +as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be passed +in the query string, and will result in that PRAGMA being called for each +new connection. Currently, ``cipher``, ``kdf_iter`` +``cipher_page_size`` and ``cipher_use_hmac`` are supported:: + + e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000') + + +Pooling Behavior +---------------- + +The driver makes a change to the default pool behavior of pysqlite +as described in :ref:`pysqlite_threading_pooling`. The pysqlcipher driver +has been observed to be significantly slower on connection than the +pysqlite driver, most likely due to the encryption overhead, so the +dialect here defaults to using the :class:`.SingletonThreadPool` +implementation, +instead of the :class:`.NullPool` pool used by pysqlite. As always, the pool +implementation is entirely configurable using the +:paramref:`.create_engine.poolclass` parameter; the :class:`.StaticPool` may +be more feasible for single-threaded use, or :class:`.NullPool` may be used +to prevent unencrypted connections from being held open for long periods of +time, at the expense of slower startup time for new connections. + + +""" +from __future__ import absolute_import +from .pysqlite import SQLiteDialect_pysqlite +from ...engine import url as _url +from ... 
import pool + + +class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite): + driver = 'pysqlcipher' + + pragmas = ('kdf_iter', 'cipher', 'cipher_page_size', 'cipher_use_hmac') + + @classmethod + def dbapi(cls): + from pysqlcipher import dbapi2 as sqlcipher + return sqlcipher + + @classmethod + def get_pool_class(cls, url): + return pool.SingletonThreadPool + + def connect(self, *cargs, **cparams): + passphrase = cparams.pop('passphrase', '') + + pragmas = dict( + (key, cparams.pop(key, None)) for key in + self.pragmas + ) + + conn = super(SQLiteDialect_pysqlcipher, self).\ + connect(*cargs, **cparams) + conn.execute('pragma key="%s"' % passphrase) + for prag, value in pragmas.items(): + if value is not None: + conn.execute('pragma %s=%s' % (prag, value)) + + return conn + + def create_connect_args(self, url): + super_url = _url.URL( + url.drivername, username=url.username, + host=url.host, database=url.database, query=url.query) + c_args, opts = super(SQLiteDialect_pysqlcipher, self).\ + create_connect_args(super_url) + opts['passphrase'] = url.password + return c_args, opts + +dialect = SQLiteDialect_pysqlcipher diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index b53f4d4a04..33d04deebc 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -1,5 +1,6 @@ # sqlite/pysqlite.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -87,7 +88,8 @@ can be forced if one configures "native_datetime=True" on create_engine():: engine = create_engine('sqlite://', - connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES}, + connect_args={'detect_types': + sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES}, native_datetime=True ) @@ -95,7 +97,8 @@ DATETIME or TIME types...confused yet ?) will not perform any bind parameter or result processing. Execution of "func.current_date()" will return a string. "func.current_timestamp()" is registered as returning a DATETIME type in -SQLAlchemy, so this function still receives SQLAlchemy-level result processing. +SQLAlchemy, so this function still receives SQLAlchemy-level result +processing. .. _pysqlite_threading_pooling: @@ -110,12 +113,12 @@ under any circumstances. Pysqlite does include a now-undocumented flag known as -``check_same_thread`` which will disable this check, however note that pysqlite -connections are still not safe to use in concurrently in multiple threads. -In particular, any statement execution calls would need to be externally -mutexed, as Pysqlite does not provide for thread-safe propagation of error -messages among other things. So while even ``:memory:`` databases can be -shared among threads in modern SQLite, Pysqlite doesn't provide enough +``check_same_thread`` which will disable this check, however note that +pysqlite connections are still not safe to use in concurrently in multiple +threads. In particular, any statement execution calls would need to be +externally mutexed, as Pysqlite does not provide for thread-safe propagation +of error messages among other things. So while even ``:memory:`` databases +can be shared among threads in modern SQLite, Pysqlite doesn't provide enough thread-safety to make this usage worth it. 
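For illustration only, a minimal sketch of what disabling the check looks
like, assuming the standard ``create_engine()`` / ``connect_args`` interface
and an external lock supplied by the application (pysqlite itself provides
no such protection)::

    import threading

    from sqlalchemy import create_engine
    from sqlalchemy.pool import StaticPool

    # single shared connection; the check_same_thread check is disabled
    engine = create_engine(
        "sqlite://",
        connect_args={"check_same_thread": False},
        poolclass=StaticPool,
    )

    db_lock = threading.Lock()

    def run(stmt):
        # every statement execution is externally mutexed
        with db_lock:
            return engine.execute(stmt)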
SQLAlchemy sets up pooling to work with Pysqlite's default behavior: @@ -141,8 +144,8 @@ Using a Memory Database in Multiple Threads ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -To use a ``:memory:`` database in a multithreaded scenario, the same connection -object must be shared among threads, since the database exists +To use a ``:memory:`` database in a multithreaded scenario, the same +connection object must be shared among threads, since the database exists only within the scope of that connection. The :class:`.StaticPool` implementation will maintain a single connection globally, and the ``check_same_thread`` flag can be passed to Pysqlite @@ -163,10 +166,10 @@ temporary table in a file-based SQLite database across multiple checkouts from the connection pool, such as when using an ORM :class:`.Session` where the temporary table should continue to remain after :meth:`.Session.commit` or -:meth:`.Session.rollback` is called, a pool which maintains a single connection must -be used. Use :class:`.SingletonThreadPool` if the scope is only needed -within the current thread, or :class:`.StaticPool` is scope is needed within -multiple threads for this case:: +:meth:`.Session.rollback` is called, a pool which maintains a single +connection must be used. Use :class:`.SingletonThreadPool` if the scope is +only needed within the current thread, or :class:`.StaticPool` is scope is +needed within multiple threads for this case:: # maintain the same connection per thread from sqlalchemy.pool import SingletonThreadPool @@ -197,29 +200,68 @@ .. _pysqlite_serializable: -Serializable Transaction Isolation ----------------------------------- +Serializable isolation / Savepoints / Transactional DDL +------------------------------------------------------- -The pysqlite DBAPI driver has a long-standing bug in which transactional -state is not begun until the first DML statement, that is INSERT, UPDATE -or DELETE, is emitted. A SELECT statement will not cause transactional -state to begin. While this mode of usage is fine for typical situations -and has the advantage that the SQLite database file is not prematurely -locked, it breaks serializable transaction isolation, which requires -that the database file be locked upon any SQL being emitted. +In the section :ref:`sqlite_concurrency`, we refer to the pysqlite +driver's assortment of issues that prevent several features of SQLite +from working correctly. The pysqlite DBAPI driver has several +long-standing bugs which impact the correctness of its transactional +behavior. In its default mode of operation, SQLite features such as +SERIALIZABLE isolation, transactional DDL, and SAVEPOINT support are +non-functional, and in order to use these features, workarounds must +be taken. -To work around this issue, the ``BEGIN`` keyword can be emitted -at the start of each transaction. The following recipe establishes -a :meth:`.ConnectionEvents.begin` handler to achieve this:: +The issue is essentially that the driver attempts to second-guess the user's +intent, failing to start transactions and sometimes ending them prematurely, in +an effort to minimize the SQLite databases's file locking behavior, even +though SQLite itself uses "shared" locks for read-only activities. + +SQLAlchemy chooses to not alter this behavior by default, as it is the +long-expected behavior of the pysqlite driver; if and when the pysqlite +driver attempts to repair these issues, that will be more of a driver towards +defaults for SQLAlchemy. 
+ +The good news is that with a few events, we can implement transactional +support fully, by disabling pysqlite's feature entirely and emitting BEGIN +ourselves. This is achieved using two event listeners:: from sqlalchemy import create_engine, event - engine = create_engine("sqlite:///myfile.db", isolation_level='SERIALIZABLE') + engine = create_engine("sqlite:///myfile.db") + + @event.listens_for(engine, "connect") + def do_connect(dbapi_connection, connection_record): + # disable pysqlite's emitting of the BEGIN statement entirely. + # also stops it from emitting COMMIT before any DDL. + dbapi_connection.isolation_level = None @event.listens_for(engine, "begin") def do_begin(conn): + # emit our own BEGIN conn.execute("BEGIN") +Above, we intercept a new pysqlite connection and disable any transactional +integration. Then, at the point at which SQLAlchemy knows that transaction +scope is to begin, we emit ``"BEGIN"`` ourselves. + +When we take control of ``"BEGIN"``, we can also control directly SQLite's +locking modes, introduced at `BEGIN TRANSACTION `_, +by adding the desired locking mode to our ``"BEGIN"``:: + + @event.listens_for(engine, "begin") + def do_begin(conn): + conn.execute("BEGIN EXCLUSIVE") + +.. seealso:: + + `BEGIN TRANSACTION `_ - on the SQLite site + + `sqlite3 SELECT does not BEGIN a transaction `_ - on the Python bug tracker + + `sqlite3 module breaks transactions and potentially corrupts data `_ - on the Python bug tracker + + """ from sqlalchemy.dialects.sqlite.base import SQLiteDialect, DATETIME, DATE @@ -330,6 +372,6 @@ def create_connect_args(self, url): def is_disconnect(self, e, connection, cursor): return isinstance(e, self.dbapi.ProgrammingError) and \ - "Cannot operate on a closed database." in str(e) + "Cannot operate on a closed database." 
in str(e) dialect = SQLiteDialect_pysqlite diff --git a/lib/sqlalchemy/dialects/sybase/__init__.py b/lib/sqlalchemy/dialects/sybase/__init__.py index 85f9dd9c91..18535edcb2 100644 --- a/lib/sqlalchemy/dialects/sybase/__init__.py +++ b/lib/sqlalchemy/dialects/sybase/__init__.py @@ -1,5 +1,6 @@ # sybase/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -10,11 +11,11 @@ base.dialect = pyodbc.dialect from .base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\ - TEXT, DATE, DATETIME, FLOAT, NUMERIC,\ - BIGINT, INT, INTEGER, SMALLINT, BINARY,\ - VARBINARY, UNITEXT, UNICHAR, UNIVARCHAR,\ - IMAGE, BIT, MONEY, SMALLMONEY, TINYINT,\ - dialect + TEXT, DATE, DATETIME, FLOAT, NUMERIC,\ + BIGINT, INT, INTEGER, SMALLINT, BINARY,\ + VARBINARY, UNITEXT, UNICHAR, UNIVARCHAR,\ + IMAGE, BIT, MONEY, SMALLMONEY, TINYINT,\ + dialect __all__ = ( diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py index 501270778f..1e38534fb0 100644 --- a/lib/sqlalchemy/dialects/sybase/base.py +++ b/lib/sqlalchemy/dialects/sybase/base.py @@ -1,5 +1,6 @@ # sybase/base.py -# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors +# # get_select_precolumns(), limit_clause() implementation # copyright (C) 2007 Fisch Asset Management # AG http://www.fam.ch, with coding by Alexander Houben @@ -31,10 +32,10 @@ from sqlalchemy import util, sql, exc from sqlalchemy.types import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\ - TEXT, DATE, DATETIME, FLOAT, NUMERIC,\ - BIGINT, INT, INTEGER, SMALLINT, BINARY,\ - VARBINARY, DECIMAL, TIMESTAMP, Unicode,\ - UnicodeText, REAL + TEXT, DATE, DATETIME, FLOAT, NUMERIC,\ + BIGINT, INT, INTEGER, SMALLINT, BINARY,\ + VARBINARY, DECIMAL, TIMESTAMP, Unicode,\ + UnicodeText, REAL RESERVED_WORDS = set([ "add", "all", "alter", "and", @@ -93,7 +94,7 @@ "when", "where", "while", "window", "with", "with_cube", "with_lparen", "with_rollup", "within", "work", "writetext", - ]) +]) class _SybaseUnitypeMixin(object): @@ -145,40 +146,40 @@ class IMAGE(sqltypes.LargeBinary): class SybaseTypeCompiler(compiler.GenericTypeCompiler): - def visit_large_binary(self, type_): + def visit_large_binary(self, type_, **kw): return self.visit_IMAGE(type_) - def visit_boolean(self, type_): + def visit_boolean(self, type_, **kw): return self.visit_BIT(type_) - def visit_unicode(self, type_): + def visit_unicode(self, type_, **kw): return self.visit_NVARCHAR(type_) - def visit_UNICHAR(self, type_): + def visit_UNICHAR(self, type_, **kw): return "UNICHAR(%d)" % type_.length - def visit_UNIVARCHAR(self, type_): + def visit_UNIVARCHAR(self, type_, **kw): return "UNIVARCHAR(%d)" % type_.length - def visit_UNITEXT(self, type_): + def visit_UNITEXT(self, type_, **kw): return "UNITEXT" - def visit_TINYINT(self, type_): + def visit_TINYINT(self, type_, **kw): return "TINYINT" - def visit_IMAGE(self, type_): + def visit_IMAGE(self, type_, **kw): return "IMAGE" - def visit_BIT(self, type_): + def visit_BIT(self, type_, **kw): return "BIT" - def visit_MONEY(self, type_): + def visit_MONEY(self, type_, **kw): return "MONEY" - def visit_SMALLMONEY(self, type_): + def visit_SMALLMONEY(self, type_, **kw): return "SMALLMONEY" - def visit_UNIQUEIDENTIFIER(self, type_): + def visit_UNIQUEIDENTIFIER(self, type_, **kw): 
return "UNIQUEIDENTIFIER" ischema_names = { @@ -224,7 +225,7 @@ def visit_UNIQUEIDENTIFIER(self, type_): 'image': IMAGE, 'bit': BIT, -# not in documentation for ASE 15.7 + # not in documentation for ASE 15.7 'long varchar': TEXT, # TODO 'timestamp': TIMESTAMP, 'uniqueidentifier': UNIQUEIDENTIFIER, @@ -267,12 +268,13 @@ def pre_exec(self): if insert_has_sequence: self._enable_identity_insert = \ - seq_column.key in self.compiled_parameters[0] + seq_column.key in self.compiled_parameters[0] else: self._enable_identity_insert = False if self._enable_identity_insert: - self.cursor.execute("SET IDENTITY_INSERT %s ON" % + self.cursor.execute( + "SET IDENTITY_INSERT %s ON" % self.dialect.identifier_preparer.format_table(tbl)) if self.isddl: @@ -281,15 +283,15 @@ def pre_exec(self): # include a note about that. if not self.should_autocommit: raise exc.InvalidRequestError( - "The Sybase dialect only supports " - "DDL in 'autocommit' mode at this time.") + "The Sybase dialect only supports " + "DDL in 'autocommit' mode at this time.") self.root_connection.engine.logger.info( - "AUTOCOMMIT (Assuming no Sybase 'ddl in tran')") + "AUTOCOMMIT (Assuming no Sybase 'ddl in tran')") self.set_ddl_autocommit( - self.root_connection.connection.connection, - True) + self.root_connection.connection.connection, + True) def post_exec(self): if self.isddl: @@ -297,10 +299,10 @@ def post_exec(self): if self._enable_identity_insert: self.cursor.execute( - "SET IDENTITY_INSERT %s OFF" % - self.dialect.identifier_preparer. - format_table(self.compiled.statement.table) - ) + "SET IDENTITY_INSERT %s OFF" % + self.dialect.identifier_preparer. + format_table(self.compiled.statement.table) + ) def get_lastrowid(self): cursor = self.create_cursor() @@ -316,40 +318,42 @@ class SybaseSQLCompiler(compiler.SQLCompiler): extract_map = util.update_copy( compiler.SQLCompiler.extract_map, { - 'doy': 'dayofyear', - 'dow': 'weekday', - 'milliseconds': 'millisecond' - }) + 'doy': 'dayofyear', + 'dow': 'weekday', + 'milliseconds': 'millisecond' + }) - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): s = select._distinct and "DISTINCT " or "" # TODO: don't think Sybase supports # bind params for FIRST / TOP - if select._limit: - #if select._limit == 1: - #s += "FIRST " - #else: - #s += "TOP %s " % (select._limit,) - s += "TOP %s " % (select._limit,) - if select._offset: - if not select._limit: + limit = select._limit + if limit: + # if select._limit == 1: + # s += "FIRST " + # else: + # s += "TOP %s " % (select._limit,) + s += "TOP %s " % (limit,) + offset = select._offset + if offset: + if not limit: # FIXME: sybase doesn't allow an offset without a limit # so use a huge value for TOP here s += "TOP 1000000 " - s += "START AT %s " % (select._offset + 1,) + s += "START AT %s " % (offset + 1,) return s def get_from_hint_text(self, table, text): return text - def limit_clause(self, select): + def limit_clause(self, select, **kw): # Limit in sybase is after the select keyword return "" def visit_extract(self, extract, **kw): field = self.extract_map.get(extract.field, extract.field) return 'DATEPART("%s", %s)' % ( - field, self.process(extract.expr, **kw)) + field, self.process(extract.expr, **kw)) def visit_now_func(self, fn, **kw): return "GETDATE()" @@ -373,21 +377,22 @@ def order_by_clause(self, select, **kw): class SybaseDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): colspec = self.preparer.format_column(column) + " " + \ - 
self.dialect.type_compiler.process(column.type) + self.dialect.type_compiler.process( + column.type, type_expression=column) if column.table is None: raise exc.CompileError( - "The Sybase dialect requires Table-bound " - "columns in order to generate DDL") + "The Sybase dialect requires Table-bound " + "columns in order to generate DDL") seq_col = column.table._autoincrement_column # install a IDENTITY Sequence if we have an implicit IDENTITY column if seq_col is column: sequence = isinstance(column.default, sa_schema.Sequence) \ - and column.default + and column.default if sequence: start, increment = sequence.start or 1, \ - sequence.increment or 1 + sequence.increment or 1 else: start, increment = 1, 1 if (start, increment) == (1, 1): @@ -413,8 +418,8 @@ def visit_drop_index(self, drop): return "\nDROP INDEX %s.%s" % ( self.preparer.quote_identifier(index.table.name), self._prepared_index_name(drop.element, - include_schema=False) - ) + include_schema=False) + ) class SybaseIdentifierPreparer(compiler.IdentifierPreparer): @@ -444,14 +449,14 @@ class SybaseDialect(default.DefaultDialect): def _get_default_schema_name(self, connection): return connection.scalar( - text("SELECT user_name() as user_name", - typemap={'user_name': Unicode}) - ) + text("SELECT user_name() as user_name", + typemap={'user_name': Unicode}) + ) def initialize(self, connection): super(SybaseDialect, self).initialize(connection) if self.server_version_info is not None and\ - self.server_version_info < (15, ): + self.server_version_info < (15, ): self.max_identifier_length = 30 else: self.max_identifier_length = 255 @@ -517,14 +522,15 @@ def get_columns(self, connection, table_name, schema=None, **kw): for (name, type_, nullable, autoincrement, default, precision, scale, length) in results: col_info = self._get_column_info(name, type_, bool(nullable), - bool(autoincrement), default, precision, scale, - length) + bool(autoincrement), + default, precision, scale, + length) columns.append(col_info) return columns def _get_column_info(self, name, type_, nullable, autoincrement, default, - precision, scale, length): + precision, scale, length): coltype = self.ischema_names.get(type_, None) @@ -541,8 +547,8 @@ def _get_column_info(self, name, type_, nullable, autoincrement, default, if coltype: coltype = coltype(*args, **kwargs) - #is this necessary - #if is_array: + # is this necessary + # if is_array: # coltype = ARRAY(coltype) else: util.warn("Did not recognize type '%s' of column '%s'" % @@ -602,8 +608,8 @@ def get_foreign_keys(self, connection, table_name, schema=None, **kw): FROM sysreferences r JOIN sysobjects o on r.tableid = o.id WHERE r.tableid = :table_id """) - referential_constraints = connection.execute(REFCONSTRAINT_SQL, - table_id=table_id) + referential_constraints = connection.execute( + REFCONSTRAINT_SQL, table_id=table_id).fetchall() REFTABLE_SQL = text(""" SELECT o.name AS name, u.name AS 'schema' @@ -640,12 +646,12 @@ def get_foreign_keys(self, connection, table_name, schema=None, **kw): referred_columns.append(reftable_columns[r["refkey%i" % i]]) fk_info = { - "constrained_columns": constrained_columns, - "referred_schema": reftable["schema"], - "referred_table": reftable["name"], - "referred_columns": referred_columns, - "name": r["name"] - } + "constrained_columns": constrained_columns, + "referred_schema": reftable["schema"], + "referred_table": reftable["name"], + "referred_columns": referred_columns, + "name": r["name"] + } foreign_keys.append(fk_info) @@ -734,10 +740,13 @@ def 
get_pk_constraint(self, connection, table_name, schema=None, **kw): results.close() constrained_columns = [] - for i in range(1, pks["count"] + 1): - constrained_columns.append(pks["pk_%i" % (i,)]) - return {"constrained_columns": constrained_columns, - "name": pks["name"]} + if pks: + for i in range(1, pks["count"] + 1): + constrained_columns.append(pks["pk_%i" % (i,)]) + return {"constrained_columns": constrained_columns, + "name": pks["name"]} + else: + return {"constrained_columns": [], "name": None} @reflection.cache def get_schema_names(self, connection, **kw): diff --git a/lib/sqlalchemy/dialects/sybase/mxodbc.py b/lib/sqlalchemy/dialects/sybase/mxodbc.py index f14d1c420a..60e6510a5b 100644 --- a/lib/sqlalchemy/dialects/sybase/mxodbc.py +++ b/lib/sqlalchemy/dialects/sybase/mxodbc.py @@ -1,5 +1,6 @@ # sybase/mxodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/sqlalchemy/dialects/sybase/pyodbc.py b/lib/sqlalchemy/dialects/sybase/pyodbc.py index f773e5a6d1..348ca321d2 100644 --- a/lib/sqlalchemy/dialects/sybase/pyodbc.py +++ b/lib/sqlalchemy/dialects/sybase/pyodbc.py @@ -1,5 +1,6 @@ # sybase/pyodbc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,7 +9,8 @@ .. dialect:: sybase+pyodbc :name: PyODBC :dbapi: pyodbc - :connectstring: sybase+pyodbc://:@[/] + :connectstring: sybase+pyodbc://:@\ +[/] :url: http://pypi.python.org/pypi/pyodbc/ @@ -33,7 +35,7 @@ """ from sqlalchemy.dialects.sybase.base import SybaseDialect,\ - SybaseExecutionContext + SybaseExecutionContext from sqlalchemy.connectors.pyodbc import PyODBCConnector from sqlalchemy import types as sqltypes, processors import decimal @@ -50,7 +52,7 @@ class _SybNumeric_pyodbc(sqltypes.Numeric): def bind_processor(self, dialect): super_process = super(_SybNumeric_pyodbc, self).\ - bind_processor(dialect) + bind_processor(dialect) def process(value): if self.asdecimal and \ diff --git a/lib/sqlalchemy/dialects/sybase/pysybase.py b/lib/sqlalchemy/dialects/sybase/pysybase.py index 664bd9ac01..41ca47fd37 100644 --- a/lib/sqlalchemy/dialects/sybase/pysybase.py +++ b/lib/sqlalchemy/dialects/sybase/pysybase.py @@ -1,5 +1,6 @@ # sybase/pysybase.py -# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,7 +9,8 @@ .. 
dialect:: sybase+pysybase :name: Python-Sybase :dbapi: Sybase - :connectstring: sybase+pysybase://:@/[database name] + :connectstring: sybase+pysybase://:@/\ +[database name] :url: http://python-sybase.sourceforge.net/ Unicode Support @@ -21,7 +23,7 @@ from sqlalchemy import types as sqltypes, processors from sqlalchemy.dialects.sybase.base import SybaseDialect, \ - SybaseExecutionContext, SybaseSQLCompiler + SybaseExecutionContext, SybaseSQLCompiler class _SybNumeric(sqltypes.Numeric): @@ -61,8 +63,8 @@ class SybaseDialect_pysybase(SybaseDialect): statement_compiler = SybaseSQLCompiler_pysybase colspecs = { - sqltypes.Numeric: _SybNumeric, - sqltypes.Float: sqltypes.Float + sqltypes.Numeric: _SybNumeric, + sqltypes.Float: sqltypes.Float } @classmethod @@ -89,7 +91,7 @@ def _get_server_version_info(self, connection): def is_disconnect(self, e, connection, cursor): if isinstance(e, (self.dbapi.OperationalError, - self.dbapi.ProgrammingError)): + self.dbapi.ProgrammingError)): msg = str(e) return ('Unable to complete network request to host' in msg or 'Invalid connection state' in msg or diff --git a/lib/sqlalchemy/dialects/type_migration_guidelines.txt b/lib/sqlalchemy/dialects/type_migration_guidelines.txt deleted file mode 100644 index 1ca15f7fb1..0000000000 --- a/lib/sqlalchemy/dialects/type_migration_guidelines.txt +++ /dev/null @@ -1,145 +0,0 @@ -Rules for Migrating TypeEngine classes to 0.6 ---------------------------------------------- - -1. the TypeEngine classes are used for: - - a. Specifying behavior which needs to occur for bind parameters - or result row columns. - - b. Specifying types that are entirely specific to the database - in use and have no analogue in the sqlalchemy.types package. - - c. Specifying types where there is an analogue in sqlalchemy.types, - but the database in use takes vendor-specific flags for those - types. - - d. If a TypeEngine class doesn't provide any of this, it should be - *removed* from the dialect. - -2. the TypeEngine classes are *no longer* used for generating DDL. Dialects -now have a TypeCompiler subclass which uses the same visit_XXX model as -other compilers. - -3. the "ischema_names" and "colspecs" dictionaries are now required members on -the Dialect class. - -4. The names of types within dialects are now important. If a dialect-specific type -is a subclass of an existing generic type and is only provided for bind/result behavior, -the current mixed case naming can remain, i.e. _PGNumeric for Numeric - in this case, -end users would never need to use _PGNumeric directly. However, if a dialect-specific -type is specifying a type *or* arguments that are not present generically, it should -match the real name of the type on that backend, in uppercase. E.g. postgresql.INET, -mysql.ENUM, postgresql.ARRAY. - -Or follow this handy flowchart: - - is the type meant to provide bind/result is the type the same name as an - behavior to a generic type (i.e. MixedCase) ---- no ---> UPPERCASE type in types.py ? - type in types.py ? | | - | no yes - yes | | - | | does your type need special - | +<--- yes --- behavior or arguments ? - | | | - | | no - name the type using | | - _MixedCase, i.e. v V - _OracleBoolean. it name the type don't make a - stays private to the dialect identically as that type, make sure the dialect's - and is invoked *only* via within the DB, base.py imports the types.py - the colspecs dict. using UPPERCASE UPPERCASE name into its namespace - | (i.e. BIT, NCHAR, INTERVAL). - | Users can import it. 
- | | - v v - subclass the closest is the name of this type - MixedCase type types.py, identical to an UPPERCASE - i.e. <--- no ------- name in types.py ? - class _DateTime(types.DateTime), - class DATETIME2(types.DateTime), | - class BIT(types.TypeEngine). yes - | - v - the type should - subclass the - UPPERCASE - type in types.py - (i.e. class BLOB(types.BLOB)) - - -Example 1. pysqlite needs bind/result processing for the DateTime type in types.py, -which applies to all DateTimes and subclasses. It's named _SLDateTime and -subclasses types.DateTime. - -Example 2. MS-SQL has a TIME type which takes a non-standard "precision" argument -that is rendered within DDL. So it's named TIME in the MS-SQL dialect's base.py, -and subclasses types.TIME. Users can then say mssql.TIME(precision=10). - -Example 3. MS-SQL dialects also need special bind/result processing for date -But its DATE type doesn't render DDL differently than that of a plain -DATE, i.e. it takes no special arguments. Therefore we are just adding behavior -to types.Date, so it's named _MSDate in the MS-SQL dialect's base.py, and subclasses -types.Date. - -Example 4. MySQL has a SET type, there's no analogue for this in types.py. So -MySQL names it SET in the dialect's base.py, and it subclasses types.String, since -it ultimately deals with strings. - -Example 5. Postgresql has a DATETIME type. The DBAPIs handle dates correctly, -and no special arguments are used in PG's DDL beyond what types.py provides. -Postgresql dialect therefore imports types.DATETIME into its base.py. - -Ideally one should be able to specify a schema using names imported completely from a -dialect, all matching the real name on that backend: - - from sqlalchemy.dialects.postgresql import base as pg - - t = Table('mytable', metadata, - Column('id', pg.INTEGER, primary_key=True), - Column('name', pg.VARCHAR(300)), - Column('inetaddr', pg.INET) - ) - -where above, the INTEGER and VARCHAR types are ultimately from sqlalchemy.types, -but the PG dialect makes them available in its own namespace. - -5. "colspecs" now is a dictionary of generic or uppercased types from sqlalchemy.types -linked to types specified in the dialect. Again, if a type in the dialect does not -specify any special behavior for bind_processor() or result_processor() and does not -indicate a special type only available in this database, it must be *removed* from the -module and from this dictionary. - -6. "ischema_names" indicates string descriptions of types as returned from the database -linked to TypeEngine classes. - - a. The string name should be matched to the most specific type possible within - sqlalchemy.types, unless there is no matching type within sqlalchemy.types in which - case it points to a dialect type. *It doesn't matter* if the dialect has it's - own subclass of that type with special bind/result behavior - reflect to the types.py - UPPERCASE type as much as possible. With very few exceptions, all types - should reflect to an UPPERCASE type. - - b. If the dialect contains a matching dialect-specific type that takes extra arguments - which the generic one does not, then point to the dialect-specific type. E.g. - mssql.VARCHAR takes a "collation" parameter which should be preserved. - -5. DDL, or what was formerly issued by "get_col_spec()", is now handled exclusively by -a subclass of compiler.GenericTypeCompiler. - - a. your TypeCompiler class will receive generic and uppercase types from - sqlalchemy.types. 
Do not assume the presence of dialect-specific attributes on - these types. - - b. the visit_UPPERCASE methods on GenericTypeCompiler should *not* be overridden with - methods that produce a different DDL name. Uppercase types don't do any kind of - "guessing" - if visit_TIMESTAMP is called, the DDL should render as TIMESTAMP in - all cases, regardless of whether or not that type is legal on the backend database. - - c. the visit_UPPERCASE methods *should* be overridden with methods that add additional - arguments and flags to those types. - - d. the visit_lowercase methods are overridden to provide an interpretation of a generic - type. E.g. visit_large_binary() might be overridden to say "return self.visit_BIT(type_)". - - e. visit_lowercase methods should *never* render strings directly - it should always - be via calling a visit_UPPERCASE() method. diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py index 99251f6303..4c19c055e0 100644 --- a/lib/sqlalchemy/engine/__init__.py +++ b/lib/sqlalchemy/engine/__init__.py @@ -1,5 +1,6 @@ # engine/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,7 +10,7 @@ The engine package defines the basic components used to interface DB-API modules with higher-level statement construction, connection-management, execution and result contexts. The primary -"entry point" class into this package is the Engine and it's public +"entry point" class into this package is the Engine and its public constructor ``create_engine()``. This package includes: @@ -54,6 +55,7 @@ Connectable, Dialect, ExecutionContext, + ExceptionContext, # backwards compat Compiled, @@ -67,20 +69,21 @@ RootTransaction, Transaction, TwoPhaseTransaction, - ) +) from .result import ( + BaseRowProxy, BufferedColumnResultProxy, BufferedColumnRow, BufferedRowResultProxy, FullyBufferedResultProxy, ResultProxy, RowProxy, - ) +) from .util import ( connection_memoize - ) +) from . import util, strategies @@ -246,6 +249,34 @@ def create_engine(*args, **kwargs): Microsoft SQL Server. Set this to ``False`` to disable the automatic usage of RETURNING. + :param isolation_level: this string parameter is interpreted by various + dialects in order to affect the transaction isolation level of the + database connection. The parameter essentially accepts some subset of + these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE_READ"``, + ``"READ_COMMITTED"``, ``"READ_UNCOMMITTED"`` and ``"AUTOCOMMIT"``. + Behavior here varies per backend, and + individual dialects should be consulted directly. + + Note that the isolation level can also be set on a per-:class:`.Connection` + basis as well, using the + :paramref:`.Connection.execution_options.isolation_level` + feature. + + .. seealso:: + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`.Connection` isolation level + + :ref:`SQLite Transaction Isolation ` + + :ref:`Postgresql Transaction Isolation ` + + :ref:`MySQL Transaction Isolation ` + + :ref:`session_transaction_isolation` - for the ORM + :param label_length=None: optional integer value which limits the size of dynamically generated column labels to that many characters. 
If less than 6, labels are generated as @@ -274,6 +305,17 @@ def create_engine(*args, **kwargs): be used instead. Can be used for testing of DBAPIs as well as to inject "mock" DBAPI implementations into the :class:`.Engine`. + :param paramstyle=None: The `paramstyle `_ + to use when rendering bound parameters. This style defaults to the + one recommended by the DBAPI itself, which is retrieved from the + ``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept + more than one paramstyle, and in particular it may be desirable + to change a "named" paramstyle into a "positional" one, or vice versa. + When this attribute is passed, it should be one of the values + ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or + ``"pyformat"``, and should correspond to a parameter style known + to be supported by the DBAPI in use. + :param pool=None: an already-constructed instance of :class:`~sqlalchemy.pool.Pool`, such as a :class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this @@ -331,7 +373,7 @@ def create_engine(*args, **kwargs): * the ``mock`` strategy, which dispatches all statement execution to a function passed as the argument ``executor``. See `example in the FAQ - `_. + `_. :param executor=None: a function taking arguments ``(sql, *multiparams, **params)``, to which the ``mock`` strategy will @@ -347,14 +389,33 @@ def create_engine(*args, **kwargs): def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): """Create a new Engine instance using a configuration dictionary. - The dictionary is typically produced from a config file where keys - are prefixed, such as sqlalchemy.url, sqlalchemy.echo, etc. The - 'prefix' argument indicates the prefix to be searched for. + The dictionary is typically produced from a config file. + + The keys of interest to ``engine_from_config()`` should be prefixed, e.g. + ``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument + indicates the prefix to be searched for. Each matching key (after the + prefix is stripped) is treated as though it were the corresponding keyword + argument to a :func:`.create_engine` call. + + The only required key is (assuming the default prefix) ``sqlalchemy.url``, + which provides the :ref:`database URL `. A select set of keyword arguments will be "coerced" to their - expected type based on string values. In a future release, this - functionality will be expanded and include dialect-specific - arguments. + expected type based on string values. The set of arguments + is extensible per-dialect using the ``engine_config_types`` accessor. + + :param configuration: A dictionary (typically produced from a config file, + but this is not a requirement). Items whose keys start with the value + of 'prefix' will have that prefix stripped, and will then be passed to + :ref:`create_engine`. + + :param prefix: Prefix to match and then strip from keys + in 'configuration'. + + :param kwargs: Each keyword argument to ``engine_from_config()`` itself + overrides the corresponding item taken from the 'configuration' + dictionary. Keyword arguments should *not* be prefixed. 
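For example, a minimal sketch assuming a dictionary assembled in Python
(the URL and pool settings shown here are placeholders only)::

    from sqlalchemy import engine_from_config

    config = {
        "sqlalchemy.url": "sqlite:///example.db",
        "sqlalchemy.echo": "true",           # string value coerced to bool
        "sqlalchemy.pool_recycle": "3600",   # string value coerced to int
    }

    # keyword arguments override the corresponding configuration items
    engine = engine_from_config(config, prefix="sqlalchemy.", echo=False)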
+ """ options = dict((key[len(prefix):], configuration[key]) @@ -369,4 +430,4 @@ def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): __all__ = ( 'create_engine', 'engine_from_config', - ) +) diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 9f656cac8b..80edd95072 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -1,5 +1,6 @@ # engine/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,8 +13,8 @@ import sys from .. import exc, util, log, interfaces -from ..sql import expression, util as sql_util, schema, ddl -from .interfaces import Connectable, Compiled +from ..sql import util as sql_util +from .interfaces import Connectable, ExceptionContext from .util import _distill_params import contextlib @@ -44,7 +45,7 @@ class Connection(Connectable): """ def __init__(self, engine, connection=None, close_with_result=False, - _branch=False, _execution_options=None, + _branch_from=None, _execution_options=None, _dispatch=None, _has_events=None): """Construct a new Connection. @@ -56,48 +57,80 @@ def __init__(self, engine, connection=None, close_with_result=False, """ self.engine = engine self.dialect = engine.dialect - self.__connection = connection or engine.raw_connection() - self.__transaction = None - self.should_close_with_result = close_with_result - self.__savepoint_seq = 0 - self.__branch = _branch - self.__invalid = False - self.__can_reconnect = True - if _dispatch: + self.__branch_from = _branch_from + self.__branch = _branch_from is not None + + if _branch_from: + self.__connection = connection + self._execution_options = _execution_options + self._echo = _branch_from._echo + self.should_close_with_result = False self.dispatch = _dispatch - elif _has_events is None: - # if _has_events is sent explicitly as False, - # then don't join the dispatch of the engine; we don't - # want to handle any of the engine's events in that case. - self.dispatch = self.dispatch._join(engine.dispatch) - self._has_events = _has_events or ( - _has_events is None and engine._has_events) - - self._echo = self.engine._should_log_info() - if _execution_options: - self._execution_options =\ - engine._execution_options.union(_execution_options) + self._has_events = _branch_from._has_events else: + self.__connection = connection \ + if connection is not None else engine.raw_connection() + self.__transaction = None + self.__savepoint_seq = 0 + self.should_close_with_result = close_with_result + self.__invalid = False + self.__can_reconnect = True + self._echo = self.engine._should_log_info() + + if _has_events is None: + # if _has_events is sent explicitly as False, + # then don't join the dispatch of the engine; we don't + # want to handle any of the engine's events in that case. 
+ self.dispatch = self.dispatch._join(engine.dispatch) + self._has_events = _has_events or ( + _has_events is None and engine._has_events) + + assert not _execution_options self._execution_options = engine._execution_options if self._has_events or self.engine._has_events: - self.dispatch.engine_connect(self, _branch) + self.dispatch.engine_connect(self, self.__branch) def _branch(self): """Return a new Connection which references this Connection's engine and connection; but does not have close_with_result enabled, and also whose close() method does nothing. - This is used to execute "sub" statements within a single execution, - usually an INSERT statement. + The Core uses this very sparingly, only in the case of + custom SQL default functions that are to be INSERTed as the + primary key of a row where we need to get the value back, so we have + to invoke it distinctly - this is a very uncommon case. + + Userland code accesses _branch() when the connect() or + contextual_connect() methods are called. The branched connection + acts as much as possible like the parent, except that it stays + connected when a close() event occurs. + + """ + if self.__branch_from: + return self.__branch_from._branch() + else: + return self.engine._connection_cls( + self.engine, + self.__connection, + _branch_from=self, + _execution_options=self._execution_options, + _has_events=self._has_events, + _dispatch=self.dispatch) + + @property + def _root(self): + """return the 'root' connection. + + Returns 'self' if this connection is not a branch, else + returns the root connection from which we ultimately branched. + """ - return self.engine._connection_cls( - self.engine, - self.__connection, - _branch=True, - _has_events=self._has_events, - _dispatch=self.dispatch) + if self.__branch_from: + return self.__branch_from + else: + return self def _clone(self): """Create a shallow copy of this Connection. @@ -122,7 +155,7 @@ def execution_options(self, **opt): execution options which will take effect for a call to :meth:`execute`. As the new :class:`.Connection` references the same underlying resource, it's usually a good idea to ensure that the copies - would be discarded immediately, which is implicit if used as in:: + will be discarded immediately, which is implicit if used as in:: result = connection.execution_options(stream_results=True).\\ execute(stmt) @@ -168,14 +201,19 @@ def execution_options(self, **opt): used by the ORM internally supersedes a cache dictionary specified here. - :param isolation_level: Available on: Connection. + :param isolation_level: Available on: :class:`.Connection`. Set the transaction isolation level for - the lifespan of this connection. Valid values include - those string values accepted by the ``isolation_level`` - parameter passed to :func:`.create_engine`, and are - database specific, including those for :ref:`sqlite_toplevel`, - :ref:`postgresql_toplevel` - see those dialect's documentation - for further info. + the lifespan of this :class:`.Connection` object (*not* the + underyling DBAPI connection, for which the level is reset + to its original setting upon termination of this + :class:`.Connection` object). + + Valid values include + those string values accepted by the + :paramref:`.create_engine.isolation_level` + parameter passed to :func:`.create_engine`. These levels are + semi-database specific; see individual dialect documentation for + valid levels. 
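For illustration only (not part of this patch), a minimal sketch of the per-:class:`.Connection` isolation level option described above; ``engine`` is assumed to already exist and the level string is dialect-specific:

    # the copy returned by execution_options() shares the same DBAPI
    # connection; the level applies for the lifespan of that connection
    # and is reset when it is returned to the pool
    with engine.connect() as conn:
        c2 = conn.execution_options(isolation_level="SERIALIZABLE")
        rows = c2.execute("SELECT 1").fetchall()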
Note that this option necessarily affects the underlying DBAPI connection for the lifespan of the originating @@ -184,6 +222,41 @@ def execution_options(self, **opt): is returned to the connection pool, i.e. the :meth:`.Connection.close` method is called. + .. warning:: The ``isolation_level`` execution option should + **not** be used when a transaction is already established, that + is, the :meth:`.Connection.begin` method or similar has been + called. A database cannot change the isolation level on a + transaction in progress, and different DBAPIs and/or + SQLAlchemy dialects may implicitly roll back or commit + the transaction, or not affect the connection at all. + + .. versionchanged:: 0.9.9 A warning is emitted when the + ``isolation_level`` execution option is used after a + transaction has been started with :meth:`.Connection.begin` + or similar. + + .. note:: The ``isolation_level`` execution option is implicitly + reset if the :class:`.Connection` is invalidated, e.g. via + the :meth:`.Connection.invalidate` method, or if a + disconnection error occurs. The new connection produced after + the invalidation will not have the isolation level re-applied + to it automatically. + + .. seealso:: + + :paramref:`.create_engine.isolation_level` + - set per :class:`.Engine` isolation level + + :meth:`.Connection.get_isolation_level` - view current level + + :ref:`SQLite Transaction Isolation ` + + :ref:`Postgresql Transaction Isolation ` + + :ref:`MySQL Transaction Isolation ` + + :ref:`session_transaction_isolation` - for the ORM + :param no_parameters: When ``True``, if the final parameter list or dictionary is totally empty, will invoke the statement on the cursor as ``cursor.execute(statement)``, @@ -223,24 +296,101 @@ def closed(self): def invalidated(self): """Return True if this connection was invalidated.""" - return self.__invalid + return self._root.__invalid @property def connection(self): - "The underlying DB-API connection managed by this Connection." + """The underlying DB-API connection managed by this Connection. + + .. seealso:: + + + :ref:`dbapi_connections` + + """ try: return self.__connection except AttributeError: - return self._revalidate_connection() + try: + return self._revalidate_connection() + except Exception as e: + self._handle_dbapi_exception(e, None, None, None, None) + + def get_isolation_level(self): + """Return the current isolation level assigned to this + :class:`.Connection`. + + This will typically be the default isolation level as determined + by the dialect, unless if the + :paramref:`.Connection.execution_options.isolation_level` + feature has been used to alter the isolation level on a + per-:class:`.Connection` basis. + + This attribute will typically perform a live SQL operation in order + to procure the current isolation level, so the value returned is the + actual level on the underlying DBAPI connection regardless of how + this state was set. Compare to the + :attr:`.Connection.default_isolation_level` accessor + which returns the dialect-level setting without performing a SQL + query. + + .. versionadded:: 0.9.9 + + .. 
seealso:: + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.create_engine.isolation_level` + - set per :class:`.Engine` isolation level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`.Connection` isolation level + + """ + try: + return self.dialect.get_isolation_level(self.connection) + except Exception as e: + self._handle_dbapi_exception(e, None, None, None, None) + + @property + def default_isolation_level(self): + """The default isolation level assigned to this :class:`.Connection`. + + This is the isolation level setting that the :class:`.Connection` + has when first procured via the :meth:`.Engine.connect` method. + This level stays in place until the + :paramref:`.Connection.execution_options.isolation_level` is used + to change the setting on a per-:class:`.Connection` basis. + + Unlike :meth:`.Connection.get_isolation_level`, this attribute is set + ahead of time from the first connection procured by the dialect, + so SQL query is not invoked when this accessor is called. + + .. versionadded:: 0.9.9 + + .. seealso:: + + :meth:`.Connection.get_isolation_level` - view current level + + :paramref:`.create_engine.isolation_level` + - set per :class:`.Engine` isolation level + + :paramref:`.Connection.execution_options.isolation_level` + - set per :class:`.Connection` isolation level + + """ + return self.dialect.default_isolation_level def _revalidate_connection(self): + if self.__branch_from: + return self.__branch_from._revalidate_connection() if self.__can_reconnect and self.__invalid: if self.__transaction is not None: raise exc.InvalidRequestError( - "Can't reconnect until invalid " - "transaction is rolled back") - self.__connection = self.engine.raw_connection() + "Can't reconnect until invalid " + "transaction is rolled back") + self.__connection = self.engine.raw_connection(_connection=self) self.__invalid = False return self.__connection raise exc.ResourceClosedError("This Connection is closed") @@ -323,10 +473,10 @@ def invalidate(self, exception=None): :meth:`.Connection.invalidate` method is called, at the DBAPI level all state associated with this transaction is lost, as the DBAPI connection is closed. The :class:`.Connection` - will not allow a reconnection to proceed until the :class:`.Transaction` - object is ended, by calling the :meth:`.Transaction.rollback` - method; until that point, any attempt at continuing to use the - :class:`.Connection` will raise an + will not allow a reconnection to proceed until the + :class:`.Transaction` object is ended, by calling the + :meth:`.Transaction.rollback` method; until that point, any attempt at + continuing to use the :class:`.Connection` will raise an :class:`~sqlalchemy.exc.InvalidRequestError`. This is to prevent applications from accidentally continuing an ongoing transactional operations despite the @@ -334,24 +484,25 @@ def invalidate(self, exception=None): invalidation. The :meth:`.Connection.invalidate` method, just like auto-invalidation, - will at the connection pool level invoke the :meth:`.PoolEvents.invalidate` - event. + will at the connection pool level invoke the + :meth:`.PoolEvents.invalidate` event. .. 
seealso:: :ref:`pool_connection_invalidation` """ + if self.invalidated: return if self.closed: raise exc.ResourceClosedError("This Connection is closed") - if self._connection_is_valid: - self.__connection.invalidate(exception) - del self.__connection - self.__invalid = True + if self._root._connection_is_valid: + self._root.__connection.invalidate(exception) + del self._root.__connection + self._root.__invalid = True def detach(self): """Detach the underlying DB-API connection from its connection pool. @@ -414,6 +565,8 @@ def begin(self): :class:`.Engine`. """ + if self.__branch_from: + return self.__branch_from.begin() if self.__transaction is None: self.__transaction = RootTransaction(self) @@ -435,6 +588,9 @@ def begin_nested(self): See also :meth:`.Connection.begin`, :meth:`.Connection.begin_twophase`. """ + if self.__branch_from: + return self.__branch_from.begin_nested() + if self.__transaction is None: self.__transaction = RootTransaction(self) else: @@ -458,6 +614,9 @@ def begin_twophase(self, xid=None): """ + if self.__branch_from: + return self.__branch_from.begin_twophase(xid=xid) + if self.__transaction is not None: raise exc.InvalidRequestError( "Cannot start a two phase transaction when a transaction " @@ -478,10 +637,11 @@ def commit_prepared(self, xid, recover=False): def in_transaction(self): """Return True if a transaction is in progress.""" - - return self.__transaction is not None + return self._root.__transaction is not None def _begin_impl(self, transaction): + assert not self.__branch_from + if self._echo: self.engine.logger.info("BEGIN (implicit)") @@ -496,6 +656,8 @@ def _begin_impl(self, transaction): self._handle_dbapi_exception(e, None, None, None, None) def _rollback_impl(self): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.rollback(self) @@ -507,13 +669,16 @@ def _rollback_impl(self): except Exception as e: self._handle_dbapi_exception(e, None, None, None, None) finally: - if self.connection._reset_agent is self.__transaction: + if not self.__invalid and \ + self.connection._reset_agent is self.__transaction: self.connection._reset_agent = None self.__transaction = None else: self.__transaction = None def _commit_impl(self, autocommit=False): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.commit(self) @@ -524,11 +689,14 @@ def _commit_impl(self, autocommit=False): except Exception as e: self._handle_dbapi_exception(e, None, None, None, None) finally: - if self.connection._reset_agent is self.__transaction: + if not self.__invalid and \ + self.connection._reset_agent is self.__transaction: self.connection._reset_agent = None self.__transaction = None def _savepoint_impl(self, name=None): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.savepoint(self, name) @@ -540,6 +708,8 @@ def _savepoint_impl(self, name=None): return name def _rollback_to_savepoint_impl(self, name, context): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.rollback_savepoint(self, name, context) @@ -548,6 +718,8 @@ def _rollback_to_savepoint_impl(self, name, context): self.__transaction = context def _release_savepoint_impl(self, name, context): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.release_savepoint(self, name, context) @@ -556,6 +728,8 @@ def _release_savepoint_impl(self, name, context): self.__transaction = context def 
_begin_twophase_impl(self, transaction): + assert not self.__branch_from + if self._echo: self.engine.logger.info("BEGIN TWOPHASE (implicit)") if self._has_events or self.engine._has_events: @@ -568,6 +742,8 @@ def _begin_twophase_impl(self, transaction): self.connection._reset_agent = transaction def _prepare_twophase_impl(self, xid): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.prepare_twophase(self, xid) @@ -576,13 +752,16 @@ def _prepare_twophase_impl(self, xid): self.engine.dialect.do_prepare_twophase(self, xid) def _rollback_twophase_impl(self, xid, is_prepared): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.rollback_twophase(self, xid, is_prepared) if self._still_open_and_connection_is_valid: assert isinstance(self.__transaction, TwoPhaseTransaction) try: - self.engine.dialect.do_rollback_twophase(self, xid, is_prepared) + self.engine.dialect.do_rollback_twophase( + self, xid, is_prepared) finally: if self.connection._reset_agent is self.__transaction: self.connection._reset_agent = None @@ -591,6 +770,8 @@ def _rollback_twophase_impl(self, xid, is_prepared): self.__transaction = None def _commit_twophase_impl(self, xid, is_prepared): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.commit_twophase(self, xid, is_prepared) @@ -606,8 +787,8 @@ def _commit_twophase_impl(self, xid, is_prepared): self.__transaction = None def _autorollback(self): - if not self.in_transaction(): - self._rollback_impl() + if not self._root.in_transaction(): + self._root._rollback_impl() def close(self): """Close this :class:`.Connection`. @@ -628,16 +809,29 @@ def close(self): and will allow no further operations. """ + if self.__branch_from: + try: + del self.__connection + except AttributeError: + pass + finally: + self.__can_reconnect = False + return try: conn = self.__connection except AttributeError: pass else: - if not self.__branch: - conn.close() + + conn.close() if conn._reset_agent is self.__transaction: conn._reset_agent = None - del self.__connection + + # the close() process can end up invalidating us, + # as the pool will call our transaction as the "reset_agent" + # for rollback(), which can then cause an invalidation + if not self.__invalid: + del self.__connection self.__can_reconnect = False self.__transaction = None @@ -661,7 +855,7 @@ def execute(self, object, *multiparams, **params): a subclass of :class:`.Executable`, such as a :func:`~.expression.select` construct * a :class:`.FunctionElement`, such as that generated - by :attr:`.func`, will be automatically wrapped in + by :data:`.func`, will be automatically wrapped in a SELECT statement, which is then executed. 
* a :class:`.DDLElement` object * a :class:`.DefaultGenerator` object @@ -714,8 +908,8 @@ def execute(self, object, *multiparams, **params): meth = object._execute_on_connection except AttributeError: raise exc.InvalidRequestError( - "Unexecutable object type: %s" % - type(object)) + "Unexecutable object type: %s" % + type(object)) else: return meth(self, multiparams, params) @@ -723,7 +917,7 @@ def _execute_function(self, func, multiparams, params): """Execute a sql.FunctionElement object.""" return self._execute_clauseelement(func.select(), - multiparams, params) + multiparams, params) def _execute_default(self, default, multiparams, params): """Execute a schema.ColumnDefault object.""" @@ -741,7 +935,7 @@ def _execute_default(self, default, multiparams, params): dialect = self.dialect ctx = dialect.execution_ctx_cls._init_default( - dialect, self, conn) + dialect, self, conn) except Exception as e: self._handle_dbapi_exception(e, None, None, None, None) @@ -751,7 +945,7 @@ def _execute_default(self, default, multiparams, params): if self._has_events or self.engine._has_events: self.dispatch.after_execute(self, - default, multiparams, params, ret) + default, multiparams, params, ret) return ret @@ -775,7 +969,7 @@ def _execute_ddl(self, ddl, multiparams, params): ) if self._has_events or self.engine._has_events: self.dispatch.after_execute(self, - ddl, multiparams, params, ret) + ddl, multiparams, params, ret) return ret def _execute_clauseelement(self, elem, multiparams, params): @@ -789,25 +983,24 @@ def _execute_clauseelement(self, elem, multiparams, params): distilled_params = _distill_params(multiparams, params) if distilled_params: # note this is usually dict but we support RowProxy - # as well; but dict.keys() as an iterator is OK + # as well; but dict.keys() as an iterable is OK keys = distilled_params[0].keys() else: keys = [] dialect = self.dialect if 'compiled_cache' in self._execution_options: - key = dialect, elem, tuple(keys), len(distilled_params) > 1 - if key in self._execution_options['compiled_cache']: - compiled_sql = self._execution_options['compiled_cache'][key] - else: + key = dialect, elem, tuple(sorted(keys)), len(distilled_params) > 1 + compiled_sql = self._execution_options['compiled_cache'].get(key) + if compiled_sql is None: compiled_sql = elem.compile( - dialect=dialect, column_keys=keys, - inline=len(distilled_params) > 1) + dialect=dialect, column_keys=keys, + inline=len(distilled_params) > 1) self._execution_options['compiled_cache'][key] = compiled_sql else: compiled_sql = elem.compile( - dialect=dialect, column_keys=keys, - inline=len(distilled_params) > 1) + dialect=dialect, column_keys=keys, + inline=len(distilled_params) > 1) ret = self._execute_context( dialect, @@ -818,7 +1011,7 @@ def _execute_clauseelement(self, elem, multiparams, params): ) if self._has_events or self.engine._has_events: self.dispatch.after_execute(self, - elem, multiparams, params, ret) + elem, multiparams, params, ret) return ret def _execute_compiled(self, compiled, multiparams, params): @@ -840,7 +1033,7 @@ def _execute_compiled(self, compiled, multiparams, params): ) if self._has_events or self.engine._has_events: self.dispatch.after_execute(self, - compiled, multiparams, params, ret) + compiled, multiparams, params, ret) return ret def _execute_text(self, statement, multiparams, params): @@ -862,12 +1055,12 @@ def _execute_text(self, statement, multiparams, params): ) if self._has_events or self.engine._has_events: self.dispatch.after_execute(self, - statement, multiparams, 
params, ret) + statement, multiparams, params, ret) return ret def _execute_context(self, dialect, constructor, - statement, parameters, - *args): + statement, parameters, + *args): """Create an :class:`.ExecutionContext` and execute, returning a :class:`.ResultProxy`.""" @@ -879,16 +1072,17 @@ def _execute_context(self, dialect, constructor, context = constructor(dialect, self, conn, *args) except Exception as e: - self._handle_dbapi_exception(e, - util.text_type(statement), parameters, - None, None) + self._handle_dbapi_exception( + e, + util.text_type(statement), parameters, + None, None) if context.compiled: context.pre_exec() cursor, statement, parameters = context.cursor, \ - context.statement, \ - context.parameters + context.statement, \ + context.parameters if not context.executemany: parameters = parameters[0] @@ -896,96 +1090,82 @@ def _execute_context(self, dialect, constructor, if self._has_events or self.engine._has_events: for fn in self.dispatch.before_cursor_execute: statement, parameters = \ - fn(self, cursor, statement, parameters, - context, context.executemany) + fn(self, cursor, statement, parameters, + context, context.executemany) if self._echo: self.engine.logger.info(statement) - self.engine.logger.info("%r", - sql_util._repr_params(parameters, batches=10)) + self.engine.logger.info( + "%r", + sql_util._repr_params(parameters, batches=10) + ) + + evt_handled = False try: if context.executemany: - for fn in () if not self.dialect._has_events \ - else self.dialect.dispatch.do_executemany: - if fn(cursor, statement, parameters, context): - break - else: + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_executemany: + if fn(cursor, statement, parameters, context): + evt_handled = True + break + if not evt_handled: self.dialect.do_executemany( - cursor, - statement, - parameters, - context) - + cursor, + statement, + parameters, + context) elif not parameters and context.no_parameters: - for fn in () if not self.dialect._has_events \ - else self.dialect.dispatch.do_execute_no_params: - if fn(cursor, statement, context): - break - else: + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_execute_no_params: + if fn(cursor, statement, context): + evt_handled = True + break + if not evt_handled: self.dialect.do_execute_no_params( - cursor, - statement, - context) - + cursor, + statement, + context) else: - for fn in () if not self.dialect._has_events \ - else self.dialect.dispatch.do_execute: - if fn(cursor, statement, parameters, context): - break - else: + if self.dialect._has_events: + for fn in self.dialect.dispatch.do_execute: + if fn(cursor, statement, parameters, context): + evt_handled = True + break + if not evt_handled: self.dialect.do_execute( - cursor, - statement, - parameters, - context) + cursor, + statement, + parameters, + context) except Exception as e: self._handle_dbapi_exception( - e, - statement, - parameters, - cursor, - context) + e, + statement, + parameters, + cursor, + context) if self._has_events or self.engine._has_events: self.dispatch.after_cursor_execute(self, cursor, - statement, - parameters, - context, - context.executemany) + statement, + parameters, + context, + context.executemany) if context.compiled: context.post_exec() - if context.isinsert and not context.executemany: - context.post_insert() - - # create a resultproxy, get rowcount/implicit RETURNING - # rows, close cursor if no further results pending - result = context.get_result_proxy() - if context.isinsert: - if context._is_implicit_returning: 
- context._fetch_implicit_returning(result) - result.close(_autoclose_connection=False) - result._metadata = None - elif not context._is_explicit_returning: - result.close(_autoclose_connection=False) - result._metadata = None - elif context.isupdate and context._is_implicit_returning: - context._fetch_implicit_update_returning(result) - result.close(_autoclose_connection=False) - result._metadata = None - - elif result._metadata is None: - # no results, get rowcount - # (which requires open cursor on some drivers - # such as kintersbasdb, mxodbc), - result.rowcount - result.close(_autoclose_connection=False) - - if self.__transaction is None and context.should_autocommit: - self._commit_impl(autocommit=True) - - if result.closed and self.should_close_with_result: + if context.is_crud or context.is_text: + result = context._setup_crud_result_proxy() + else: + result = context.get_result_proxy() + if result._metadata is None: + result._soft_close(_autoclose_connection=False) + + if context.should_autocommit and self._root.__transaction is None: + self._root._commit_impl(autocommit=True) + + if result._soft_closed and self.should_close_with_result: self.close() return result @@ -1004,38 +1184,38 @@ def _cursor_execute(self, cursor, statement, parameters, context=None): if self._has_events or self.engine._has_events: for fn in self.dispatch.before_cursor_execute: statement, parameters = \ - fn(self, cursor, statement, parameters, - context, - False) + fn(self, cursor, statement, parameters, + context, + False) if self._echo: self.engine.logger.info(statement) self.engine.logger.info("%r", parameters) try: for fn in () if not self.dialect._has_events \ - else self.dialect.dispatch.do_execute: + else self.dialect.dispatch.do_execute: if fn(cursor, statement, parameters, context): break else: self.dialect.do_execute( - cursor, - statement, - parameters, - context) + cursor, + statement, + parameters, + context) except Exception as e: self._handle_dbapi_exception( - e, - statement, - parameters, - cursor, - context) + e, + statement, + parameters, + cursor, + context) if self._has_events or self.engine._has_events: self.dispatch.after_cursor_execute(self, cursor, - statement, - parameters, - context, - False) + statement, + parameters, + context, + False) def _safe_close_cursor(self, cursor): """Close the given cursor, catching exceptions @@ -1044,52 +1224,108 @@ def _safe_close_cursor(self, cursor): """ try: cursor.close() - except (SystemExit, KeyboardInterrupt): - raise except Exception: - self.connection._logger.error( - "Error closing cursor", exc_info=True) + # log the error through the connection pool's logger. 
+ self.engine.pool.logger.error( + "Error closing cursor", exc_info=True) _reentrant_error = False _is_disconnect = False def _handle_dbapi_exception(self, - e, - statement, - parameters, - cursor, - context): - + e, + statement, + parameters, + cursor, + context): exc_info = sys.exc_info() + if context and context.exception is None: + context.exception = e + if not self._is_disconnect: - self._is_disconnect = isinstance(e, self.dialect.dbapi.Error) and \ + self._is_disconnect = \ + isinstance(e, self.dialect.dbapi.Error) and \ not self.closed and \ - self.dialect.is_disconnect(e, self.__connection, cursor) + self.dialect.is_disconnect( + e, + self.__connection if not self.invalidated else None, + cursor) + if context: + context.is_disconnect = self._is_disconnect + + invalidate_pool_on_disconnect = True if self._reentrant_error: util.raise_from_cause( - exc.DBAPIError.instance(statement, - parameters, - e, - self.dialect.dbapi.Error), - exc_info - ) + exc.DBAPIError.instance(statement, + parameters, + e, + self.dialect.dbapi.Error, + dialect=self.dialect), + exc_info + ) self._reentrant_error = True try: # non-DBAPI error - if we already got a context, - # or theres no string statement, don't wrap it + # or there's no string statement, don't wrap it should_wrap = isinstance(e, self.dialect.dbapi.Error) or \ (statement is not None and context is None) - if should_wrap and context: - if self._has_events or self.engine._has_events: + if should_wrap: + sqlalchemy_exception = exc.DBAPIError.instance( + statement, + parameters, + e, + self.dialect.dbapi.Error, + connection_invalidated=self._is_disconnect, + dialect=self.dialect) + else: + sqlalchemy_exception = None + + newraise = None + + if (self._has_events or self.engine._has_events) and \ + not self._execution_options.get( + 'skip_user_error_events', False): + # legacy dbapi_error event + if should_wrap and context: self.dispatch.dbapi_error(self, - cursor, - statement, - parameters, - context, - e) + cursor, + statement, + parameters, + context, + e) + + # new handle_error event + ctx = ExceptionContextImpl( + e, sqlalchemy_exception, self.engine, + self, cursor, statement, + parameters, context, self._is_disconnect) + + for fn in self.dispatch.handle_error: + try: + # handler returns an exception; + # call next handler in a chain + per_fn = fn(ctx) + if per_fn is not None: + ctx.chained_exception = newraise = per_fn + except Exception as _raised: + # handler raises an exception - stop processing + newraise = _raised + break + + if sqlalchemy_exception and \ + self._is_disconnect != ctx.is_disconnect: + sqlalchemy_exception.connection_invalidated = \ + self._is_disconnect = ctx.is_disconnect + + # set up potentially user-defined value for + # invalidate pool. 
+ invalidate_pool_on_disconnect = \ + ctx.invalidate_pool_on_disconnect + + if should_wrap and context: context.handle_dbapi_exception(e) if not self._is_disconnect: @@ -1097,29 +1333,80 @@ def _handle_dbapi_exception(self, self._safe_close_cursor(cursor) self._autorollback() - if should_wrap: + if newraise: + util.raise_from_cause(newraise, exc_info) + elif should_wrap: util.raise_from_cause( - exc.DBAPIError.instance( - statement, - parameters, - e, - self.dialect.dbapi.Error, - connection_invalidated=self._is_disconnect), - exc_info - ) - - util.reraise(*exc_info) + sqlalchemy_exception, + exc_info + ) + else: + util.reraise(*exc_info) finally: del self._reentrant_error if self._is_disconnect: del self._is_disconnect - dbapi_conn_wrapper = self.connection - self.engine.pool._invalidate(dbapi_conn_wrapper, e) - self.invalidate(e) + if not self.invalidated: + dbapi_conn_wrapper = self.__connection + if invalidate_pool_on_disconnect: + self.engine.pool._invalidate(dbapi_conn_wrapper, e) + self.invalidate(e) if self.should_close_with_result: self.close() + @classmethod + def _handle_dbapi_exception_noconnection(cls, e, dialect, engine): + + exc_info = sys.exc_info() + + is_disconnect = dialect.is_disconnect(e, None, None) + + should_wrap = isinstance(e, dialect.dbapi.Error) + + if should_wrap: + sqlalchemy_exception = exc.DBAPIError.instance( + None, + None, + e, + dialect.dbapi.Error, + connection_invalidated=is_disconnect) + else: + sqlalchemy_exception = None + + newraise = None + + if engine._has_events: + ctx = ExceptionContextImpl( + e, sqlalchemy_exception, engine, None, None, None, + None, None, is_disconnect) + for fn in engine.dispatch.handle_error: + try: + # handler returns an exception; + # call next handler in a chain + per_fn = fn(ctx) + if per_fn is not None: + ctx.chained_exception = newraise = per_fn + except Exception as _raised: + # handler raises an exception - stop processing + newraise = _raised + break + + if sqlalchemy_exception and \ + is_disconnect != ctx.is_disconnect: + sqlalchemy_exception.connection_invalidated = \ + is_disconnect = ctx.is_disconnect + + if newraise: + util.raise_from_cause(newraise, exc_info) + elif should_wrap: + util.raise_from_cause( + sqlalchemy_exception, + exc_info + ) + else: + util.reraise(*exc_info) + def default_schema_name(self): return self.engine.dialect.get_default_schema_name(self) @@ -1191,7 +1478,23 @@ def run_callable(self, callable_, *args, **kwargs): def _run_visitor(self, visitorcallable, element, **kwargs): visitorcallable(self.dialect, self, - **kwargs).traverse_single(element) + **kwargs).traverse_single(element) + + +class ExceptionContextImpl(ExceptionContext): + """Implement the :class:`.ExceptionContext` interface.""" + + def __init__(self, exception, sqlalchemy_exception, + engine, connection, cursor, statement, parameters, + context, is_disconnect): + self.engine = engine + self.connection = connection + self.sqlalchemy_exception = sqlalchemy_exception + self.original_exception = exception + self.execution_context = context + self.statement = statement + self.parameters = parameters + self.is_disconnect = is_disconnect class Transaction(object): @@ -1228,9 +1531,13 @@ class Transaction(object): def __init__(self, connection, parent): self.connection = connection - self._parent = parent or self + self._actual_parent = parent self.is_active = True + @property + def _parent(self): + return self._actual_parent or self + def close(self): """Close this :class:`.Transaction`. 
@@ -1307,6 +1614,7 @@ class NestedTransaction(Transaction): The interface is the same as that of :class:`.Transaction`. """ + def __init__(self, connection, parent): super(NestedTransaction, self).__init__(connection, parent) self._savepoint = self.connection._savepoint_impl() @@ -1314,12 +1622,12 @@ def __init__(self, connection, parent): def _do_rollback(self): if self.is_active: self.connection._rollback_to_savepoint_impl( - self._savepoint, self._parent) + self._savepoint, self._parent) def _do_commit(self): if self.is_active: self.connection._release_savepoint_impl( - self._savepoint, self._parent) + self._savepoint, self._parent) class TwoPhaseTransaction(Transaction): @@ -1332,6 +1640,7 @@ class TwoPhaseTransaction(Transaction): with the addition of the :meth:`prepare` method. """ + def __init__(self, connection, xid): super(TwoPhaseTransaction, self).__init__(connection, None) self._is_prepared = False @@ -1378,9 +1687,9 @@ class Engine(Connectable, log.Identified): _connection_cls = Connection def __init__(self, pool, dialect, url, - logging_name=None, echo=None, proxy=None, - execution_options=None - ): + logging_name=None, echo=None, proxy=None, + execution_options=None + ): self.pool = pool self.url = url self.dialect = dialect @@ -1413,7 +1722,7 @@ def update_execution_options(self, **opt): """ self._execution_options = \ - self._execution_options.union(opt) + self._execution_options.union(opt) self.dispatch.set_engine_execution_options(self, opt) self.dialect.set_engine_execution_options(self, opt) @@ -1462,7 +1771,8 @@ def execution_options(self, **opt): shards = {"default": "base", shard_1: "db1", "shard_2": "db2"} @event.listens_for(Engine, "before_cursor_execute") - def _switch_shard(conn, cursor, stmt, params, context, executemany): + def _switch_shard(conn, cursor, stmt, + params, context, executemany): shard_id = conn._execution_options.get('shard_id', "default") current_shard = conn.info.get("current_shard", None) @@ -1505,29 +1815,28 @@ def __repr__(self): def dispose(self): """Dispose of the connection pool used by this :class:`.Engine`. + This has the effect of fully closing all **currently checked in** + database connections. Connections that are still checked out + will **not** be closed, however they will no longer be associated + with this :class:`.Engine`, so when they are closed individually, + eventually the :class:`.Pool` which they are associated with will + be garbage collected and they will be closed out fully, if + not already closed on checkin. + A new connection pool is created immediately after the old one has been disposed. This new pool, like all SQLAlchemy connection pools, does not make any actual connections to the database until one is - first requested. - - This method has two general use cases: + first requested, so as long as the :class:`.Engine` isn't used again, + no new connections will be made. - * When a dropped connection is detected, it is assumed that all - connections held by the pool are potentially dropped, and - the entire pool is replaced. - - * An application may want to use :meth:`dispose` within a test - suite that is creating multiple engines. + .. seealso:: - It is critical to note that :meth:`dispose` does **not** guarantee - that the application will release all open database connections - only - those connections that are checked into the pool are closed. - Connections which remain checked out or have been detached from - the engine are not affected. 
+ :ref:`engine_disposal` """ self.pool.dispose() self.pool = self.pool.recreate() + self.dispatch.engine_disposed(self) def _execute_default(self, default): with self.contextual_connect() as conn: @@ -1542,7 +1851,7 @@ def _optional_conn_ctx_manager(self, connection=None): yield connection def _run_visitor(self, visitorcallable, element, - connection=None, **kwargs): + connection=None, **kwargs): with self._optional_conn_ctx_manager(connection) as conn: conn._run_visitor(visitorcallable, element, **kwargs) @@ -1725,10 +2034,11 @@ def contextual_connect(self, close_with_result=False, **kwargs): """ - return self._connection_cls(self, - self.pool.connect(), - close_with_result=close_with_result, - **kwargs) + return self._connection_cls( + self, + self._wrap_pool_connect(self.pool.connect, None), + close_with_result=close_with_result, + **kwargs) def table_names(self, schema=None, connection=None): """Return a list of all table names available in the database. @@ -1749,8 +2059,8 @@ def has_table(self, table_name, schema=None): .. seealso:: - :ref:`metadata_reflection_inspector` - detailed schema inspection using - the :class:`.Inspector` interface. + :ref:`metadata_reflection_inspector` - detailed schema inspection + using the :class:`.Inspector` interface. :class:`.quoted_name` - used to pass quoting information along with a schema identifier. @@ -1758,7 +2068,18 @@ def has_table(self, table_name, schema=None): """ return self.run_callable(self.dialect.has_table, table_name, schema) - def raw_connection(self): + def _wrap_pool_connect(self, fn, connection): + dialect = self.dialect + try: + return fn() + except dialect.dbapi.Error as e: + if connection is None: + Connection._handle_dbapi_exception_noconnection( + e, dialect, self) + else: + util.reraise(*sys.exc_info()) + + def raw_connection(self, _connection=None): """Return a "raw" DBAPI connection from the connection pool. The returned object is a proxied version of the DBAPI @@ -1769,13 +2090,18 @@ def raw_connection(self): for real. This method provides direct DBAPI connection access for - special situations. In most situations, the :class:`.Connection` - object should be used, which is procured using the - :meth:`.Engine.connect` method. + special situations when the API provided by :class:`.Connection` + is not needed. When a :class:`.Connection` object is already + present, the DBAPI connection is available using + the :attr:`.Connection.connection` accessor. - """ + .. seealso:: - return self.pool.unique_connection() + :ref:`dbapi_connections` + + """ + return self._wrap_pool_connect( + self.pool.unique_connection, _connection) class OptionEngine(Engine): diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 0fd41105ce..9798d132d4 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -1,5 +1,6 @@ # engine/default.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -23,9 +24,8 @@ from .. 
import event AUTOCOMMIT_REGEXP = re.compile( - r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)', - re.I | re.UNICODE) - + r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)', + re.I | re.UNICODE) class DefaultDialect(interfaces.Dialect): @@ -61,14 +61,13 @@ class DefaultDialect(interfaces.Dialect): engine_config_types = util.immutabledict([ ('convert_unicode', util.bool_or_str('force')), - ('pool_timeout', int), + ('pool_timeout', util.asint), ('echo', util.bool_or_str('debug')), ('echo_pool', util.bool_or_str('debug')), - ('pool_recycle', int), - ('pool_size', int), - ('max_overflow', int), - ('pool_threadlocal', bool), - ('use_native_unicode', bool), + ('pool_recycle', util.asint), + ('pool_size', util.asint), + ('max_overflow', util.asint), + ('pool_threadlocal', util.asbool), ]) # if the NUMERIC type @@ -157,6 +156,15 @@ class DefaultDialect(interfaces.Dialect): reflection_options = () + dbapi_exception_translation_map = util.immutabledict() + """mapping used in the extremely unusual case that a DBAPI's + published exceptions don't actually have the __name__ that they + are linked towards. + + .. versionadded:: 1.0.5 + + """ + def __init__(self, convert_unicode=False, encoding='utf-8', paramstyle=None, dbapi=None, implicit_returning=None, @@ -194,26 +202,24 @@ def __init__(self, convert_unicode=False, if label_length and label_length > self.max_identifier_length: raise exc.ArgumentError( - "Label length of %d is greater than this dialect's" - " maximum identifier length of %d" % - (label_length, self.max_identifier_length)) + "Label length of %d is greater than this dialect's" + " maximum identifier length of %d" % + (label_length, self.max_identifier_length)) self.label_length = label_length if self.description_encoding == 'use_encoding': self._description_decoder = \ - processors.to_unicode_processor_factory( - encoding - ) + processors.to_unicode_processor_factory( + encoding + ) elif self.description_encoding is not None: self._description_decoder = \ - processors.to_unicode_processor_factory( - self.description_encoding - ) + processors.to_unicode_processor_factory( + self.description_encoding + ) self._encoder = codecs.getencoder(self.encoding) self._decoder = processors.to_unicode_processor_factory(self.encoding) - - @util.memoized_property def _type_memos(self): return weakref.WeakKeyDictionary() @@ -229,25 +235,25 @@ def get_pool_class(cls, url): def initialize(self, connection): try: self.server_version_info = \ - self._get_server_version_info(connection) + self._get_server_version_info(connection) except NotImplementedError: self.server_version_info = None try: self.default_schema_name = \ - self._get_default_schema_name(connection) + self._get_default_schema_name(connection) except NotImplementedError: self.default_schema_name = None try: self.default_isolation_level = \ - self.get_isolation_level(connection.connection) + self.get_isolation_level(connection.connection) except NotImplementedError: self.default_isolation_level = None self.returns_unicode_strings = self._check_unicode_returns(connection) if self.description_encoding is not None and \ - self._check_unicode_description(connection): + self._check_unicode_description(connection): self._description_decoder = self.description_encoding = None self.do_rollback(connection.connection) @@ -278,7 +284,8 @@ def _check_unicode_returns(self, connection, additional_tests=None): parameters = {} def check_unicode(test): - statement = cast_to(expression.select([test]).compile(dialect=self)) + statement = cast_to( + 
expression.select([test]).compile(dialect=self)) try: cursor = connection.connection.cursor() connection._cursor_execute(cursor, statement, parameters) @@ -288,7 +295,7 @@ def check_unicode(test): # note that _cursor_execute() will have closed the cursor # if an exception is thrown. util.warn("Exception attempting to " - "detect unicode returns: %r" % de) + "detect unicode returns: %r" % de) return False else: return isinstance(row[0], util.text_type) @@ -299,7 +306,8 @@ def check_unicode(test): expression.literal_column("'test plain returns'"), sqltypes.VARCHAR(60) ), - # detect if there's an NVARCHAR type with different behavior available + # detect if there's an NVARCHAR type with different behavior + # available expression.cast( expression.literal_column("'test unicode returns'"), sqltypes.Unicode(60) @@ -350,7 +358,8 @@ def type_descriptor(self, typeobj): """ return sqltypes.adapt_type(typeobj, self.colspecs) - def reflecttable(self, connection, table, include_columns, exclude_columns): + def reflecttable( + self, connection, table, include_columns, exclude_columns): insp = reflection.Inspector.from_engine(connection) return insp.reflecttable(table, include_columns, exclude_columns) @@ -361,8 +370,8 @@ def get_pk_constraint(self, conn, table_name, schema=None, **kw): """ return { 'constrained_columns': - self.get_primary_keys(conn, table_name, - schema=schema, **kw) + self.get_primary_keys(conn, table_name, + schema=schema, **kw) } def validate_identifier(self, ident): @@ -383,6 +392,7 @@ def create_connect_args(self, url): def set_engine_execution_options(self, engine, opts): if 'isolation_level' in opts: isolation_level = opts['isolation_level'] + @event.listens_for(engine, "engine_connect") def set_isolation(connection, branch): if not branch: @@ -393,11 +403,16 @@ def set_connection_execution_options(self, connection, opts): self._set_connection_isolation(connection, opts['isolation_level']) def _set_connection_isolation(self, connection, level): + if connection.in_transaction(): + util.warn( + "Connection is already established with a Transaction; " + "setting isolation_level may implicitly rollback or commit " + "the existing transaction, or have no effect until " + "next transaction") self.set_isolation_level(connection.connection, level) connection.connection._connection_record.\ finalize_callback.append(self.reset_isolation_level) - def do_begin(self, dbapi_connection): pass @@ -451,14 +466,13 @@ class DefaultExecutionContext(interfaces.ExecutionContext): isinsert = False isupdate = False isdelete = False + is_crud = False + is_text = False isddl = False executemany = False - result_map = None compiled = None statement = None - postfetch_cols = None - prefetch_cols = None - returning_cols = None + result_column_struct = None _is_implicit_returning = False _is_explicit_returning = False @@ -471,10 +485,9 @@ def _init_ddl(cls, dialect, connection, dbapi_connection, compiled_ddl): """Initialize execution context for a DDLElement construct.""" self = cls.__new__(cls) - self.dialect = dialect self.root_connection = connection self._dbapi_connection = dbapi_connection - self.engine = connection.engine + self.dialect = connection.dialect self.compiled = compiled = compiled_ddl self.isddl = True @@ -502,61 +515,60 @@ def _init_ddl(cls, dialect, connection, dbapi_connection, compiled_ddl): @classmethod def _init_compiled(cls, dialect, connection, dbapi_connection, - compiled, parameters): + compiled, parameters): """Initialize execution context for a Compiled construct.""" self = 
cls.__new__(cls) - self.dialect = dialect self.root_connection = connection self._dbapi_connection = dbapi_connection - self.engine = connection.engine + self.dialect = connection.dialect self.compiled = compiled if not compiled.can_execute: raise exc.ArgumentError("Not an executable clause") - self.execution_options = compiled.statement._execution_options - if connection._execution_options: - self.execution_options = dict(self.execution_options) - self.execution_options.update(connection._execution_options) - - # compiled clauseelement. process bind params, process table defaults, - # track collections used by ResultProxy to target and process results + self.execution_options = compiled.statement._execution_options.union( + connection._execution_options) - self.result_map = compiled.result_map + self.result_column_struct = ( + compiled._result_columns, compiled._ordered_columns) self.unicode_statement = util.text_type(compiled) if not dialect.supports_unicode_statements: self.statement = self.unicode_statement.encode( - self.dialect.encoding) + self.dialect.encoding) else: self.statement = self.unicode_statement self.isinsert = compiled.isinsert self.isupdate = compiled.isupdate self.isdelete = compiled.isdelete - - if self.isinsert or self.isupdate or self.isdelete: - self._is_explicit_returning = bool(compiled.statement._returning) - self._is_implicit_returning = bool(compiled.returning and \ - not compiled.statement._returning) + self.is_text = compiled.isplaintext if not parameters: self.compiled_parameters = [compiled.construct_params()] else: self.compiled_parameters = \ - [compiled.construct_params(m, _group_number=grp) for - grp, m in enumerate(parameters)] + [compiled.construct_params(m, _group_number=grp) for + grp, m in enumerate(parameters)] self.executemany = len(parameters) > 1 self.cursor = self.create_cursor() - if self.isinsert or self.isupdate: - self.postfetch_cols = self.compiled.postfetch - self.prefetch_cols = self.compiled.prefetch - self.returning_cols = self.compiled.returning - self.__process_defaults() + + if self.isinsert or self.isupdate or self.isdelete: + self.is_crud = True + self._is_explicit_returning = bool(compiled.statement._returning) + self._is_implicit_returning = bool( + compiled.returning and not compiled.statement._returning) + + if not self.isdelete: + if self.compiled.prefetch: + if self.executemany: + self._process_executemany_defaults() + else: + self._process_executesingle_defaults() processors = compiled._bind_processors @@ -576,21 +588,28 @@ def _init_compiled(cls, dialect, connection, dbapi_connection, else: encode = not dialect.supports_unicode_statements for compiled_params in self.compiled_parameters: - param = {} + if encode: - for key in compiled_params: - if key in processors: - param[dialect._encoder(key)[0]] = \ - processors[key](compiled_params[key]) - else: - param[dialect._encoder(key)[0]] = \ - compiled_params[key] + param = dict( + ( + dialect._encoder(key)[0], + processors[key](compiled_params[key]) + if key in processors + else compiled_params[key] + ) + for key in compiled_params + ) else: - for key in compiled_params: - if key in processors: - param[key] = processors[key](compiled_params[key]) - else: - param[key] = compiled_params[key] + param = dict( + ( + key, + processors[key](compiled_params[key]) + if key in processors + else compiled_params[key] + ) + for key in compiled_params + ) + parameters.append(param) self.parameters = dialect.execute_sequence_format(parameters) @@ -598,14 +617,14 @@ def _init_compiled(cls, 
dialect, connection, dbapi_connection, @classmethod def _init_statement(cls, dialect, connection, dbapi_connection, - statement, parameters): + statement, parameters): """Initialize execution context for a string SQL statement.""" self = cls.__new__(cls) - self.dialect = dialect self.root_connection = connection self._dbapi_connection = dbapi_connection - self.engine = connection.engine + self.dialect = connection.dialect + self.is_text = True # plain text statement self.execution_options = connection._execution_options @@ -622,12 +641,12 @@ def _init_statement(cls, dialect, connection, dbapi_connection, self.parameters = parameters else: self.parameters = [ - dict((dialect._encoder(k)[0], d[k]) for k in d) - for d in parameters - ] or [{}] + dict((dialect._encoder(k)[0], d[k]) for k in d) + for d in parameters + ] or [{}] else: self.parameters = [dialect.execute_sequence_format(p) - for p in parameters] + for p in parameters] self.executemany = len(parameters) > 1 @@ -646,21 +665,32 @@ def _init_default(cls, dialect, connection, dbapi_connection): """Initialize execution context for a ColumnDefault construct.""" self = cls.__new__(cls) - self.dialect = dialect self.root_connection = connection self._dbapi_connection = dbapi_connection - self.engine = connection.engine + self.dialect = connection.dialect self.execution_options = connection._execution_options self.cursor = self.create_cursor() return self @util.memoized_property - def no_parameters(self): - return self.execution_options.get("no_parameters", False) + def engine(self): + return self.root_connection.engine + + @util.memoized_property + def postfetch_cols(self): + return self.compiled.postfetch @util.memoized_property - def is_crud(self): - return self.isinsert or self.isupdate or self.isdelete + def prefetch_cols(self): + return self.compiled.prefetch + + @util.memoized_property + def returning_cols(self): + self.compiled.returning + + @util.memoized_property + def no_parameters(self): + return self.execution_options.get("no_parameters", False) @util.memoized_property def should_autocommit(self): @@ -700,9 +730,9 @@ def _execute_scalar(self, stmt, type_): if type_ is not None: # apply type post processors to the result proc = type_._cached_result_processor( - self.dialect, - self.cursor.description[0][1] - ) + self.dialect, + self.cursor.description[0][1] + ) if proc: return proc(r) return r @@ -777,52 +807,96 @@ def supports_sane_rowcount(self): def supports_sane_multi_rowcount(self): return self.dialect.supports_sane_multi_rowcount - def post_insert(self): - if not self._is_implicit_returning and \ - not self._is_explicit_returning and \ - not self.compiled.inline and \ - self.dialect.postfetch_lastrowid and \ - (not self.inserted_primary_key or \ - None in self.inserted_primary_key): + def _setup_crud_result_proxy(self): + if self.isinsert and \ + not self.executemany: + if not self._is_implicit_returning and \ + not self.compiled.inline and \ + self.dialect.postfetch_lastrowid: + + self._setup_ins_pk_from_lastrowid() + + elif not self._is_implicit_returning: + self._setup_ins_pk_from_empty() + + result = self.get_result_proxy() + + if self.isinsert: + if self._is_implicit_returning: + row = result.fetchone() + self.returned_defaults = row + self._setup_ins_pk_from_implicit_returning(row) + result._soft_close(_autoclose_connection=False) + result._metadata = None + elif not self._is_explicit_returning: + result._soft_close(_autoclose_connection=False) + result._metadata = None + elif self.isupdate and 
self._is_implicit_returning: + row = result.fetchone() + self.returned_defaults = row + result._soft_close(_autoclose_connection=False) + result._metadata = None + + elif result._metadata is None: + # no results, get rowcount + # (which requires open cursor on some drivers + # such as kintersbasdb, mxodbc) + result.rowcount + result._soft_close(_autoclose_connection=False) + return result + + def _setup_ins_pk_from_lastrowid(self): + key_getter = self.compiled._key_getters_for_crud_column[2] + table = self.compiled.statement.table + compiled_params = self.compiled_parameters[0] - table = self.compiled.statement.table - lastrowid = self.get_lastrowid() + lastrowid = self.get_lastrowid() + if lastrowid is not None: autoinc_col = table._autoincrement_column if autoinc_col is not None: # apply type post processors to the lastrowid proc = autoinc_col.type._cached_result_processor( - self.dialect, None) + self.dialect, None) if proc is not None: lastrowid = proc(lastrowid) - self.inserted_primary_key = [ - lastrowid if c is autoinc_col else v - for c, v in zip( - table.primary_key, - self.inserted_primary_key) + lastrowid if c is autoinc_col else + compiled_params.get(key_getter(c), None) + for c in table.primary_key + ] + else: + # don't have a usable lastrowid, so + # do the same as _setup_ins_pk_from_empty + self.inserted_primary_key = [ + compiled_params.get(key_getter(c), None) + for c in table.primary_key ] - def _fetch_implicit_returning(self, resultproxy): + def _setup_ins_pk_from_empty(self): + key_getter = self.compiled._key_getters_for_crud_column[2] table = self.compiled.statement.table - row = resultproxy.fetchone() - - ipk = [] - for c, v in zip(table.primary_key, self.inserted_primary_key): - if v is not None: - ipk.append(v) - else: - ipk.append(row[c]) + compiled_params = self.compiled_parameters[0] + self.inserted_primary_key = [ + compiled_params.get(key_getter(c), None) + for c in table.primary_key + ] - self.inserted_primary_key = ipk - self.returned_defaults = row + def _setup_ins_pk_from_implicit_returning(self, row): + key_getter = self.compiled._key_getters_for_crud_column[2] + table = self.compiled.statement.table + compiled_params = self.compiled_parameters[0] - def _fetch_implicit_update_returning(self, resultproxy): - row = resultproxy.fetchone() - self.returned_defaults = row + self.inserted_primary_key = [ + row[col] if value is None else value + for col, value in [ + (col, compiled_params.get(key_getter(col), None)) + for col in table.primary_key + ] + ] def lastrow_has_defaults(self): return (self.isinsert or self.isupdate) and \ - bool(self.postfetch_cols) + bool(self.compiled.postfetch) def set_input_sizes(self, translate=None, exclude_types=None): """Given a cursor and ClauseParameters, call the appropriate @@ -838,29 +912,29 @@ def set_input_sizes(self, translate=None, exclude_types=None): return types = dict( - (self.compiled.bind_names[bindparam], bindparam.type) - for bindparam in self.compiled.bind_names) + (self.compiled.bind_names[bindparam], bindparam.type) + for bindparam in self.compiled.bind_names) if self.dialect.positional: inputsizes = [] for key in self.compiled.positiontup: typeengine = types[key] dbtype = typeengine.dialect_impl(self.dialect).\ - get_dbapi_type(self.dialect.dbapi) + get_dbapi_type(self.dialect.dbapi) if dbtype is not None and \ - (not exclude_types or dbtype not in exclude_types): + (not exclude_types or dbtype not in exclude_types): inputsizes.append(dbtype) try: self.cursor.setinputsizes(*inputsizes) except Exception as e: 
self.root_connection._handle_dbapi_exception( - e, None, None, None, self) + e, None, None, None, self) else: inputsizes = {} for key in self.compiled.bind_names.values(): typeengine = types[key] dbtype = typeengine.dialect_impl(self.dialect).\ - get_dbapi_type(self.dialect.dbapi) + get_dbapi_type(self.dialect.dbapi) if dbtype is not None and \ (not exclude_types or dbtype not in exclude_types): if translate: @@ -872,7 +946,7 @@ def set_input_sizes(self, translate=None, exclude_types=None): self.cursor.setinputsizes(**inputsizes) except Exception as e: self.root_connection._handle_dbapi_exception( - e, None, None, None, self) + e, None, None, None, self) def _exec_default(self, default, type_): if default.is_sequence: @@ -900,58 +974,53 @@ def get_update_default(self, column): else: return self._exec_default(column.onupdate, column.type) - def __process_defaults(self): - """Generate default values for compiled insert/update statements, - and generate inserted_primary_key collection. - """ - + def _process_executemany_defaults(self): key_getter = self.compiled._key_getters_for_crud_column[2] - if self.executemany: - if len(self.compiled.prefetch): - scalar_defaults = {} - - # pre-determine scalar Python-side defaults - # to avoid many calls of get_insert_default()/ - # get_update_default() - for c in self.prefetch_cols: - if self.isinsert and c.default and c.default.is_scalar: - scalar_defaults[c] = c.default.arg - elif self.isupdate and c.onupdate and c.onupdate.is_scalar: - scalar_defaults[c] = c.onupdate.arg - - for param in self.compiled_parameters: - self.current_parameters = param - for c in self.prefetch_cols: - if c in scalar_defaults: - val = scalar_defaults[c] - elif self.isinsert: - val = self.get_insert_default(c) - else: - val = self.get_update_default(c) - if val is not None: - param[key_getter(c)] = val - del self.current_parameters - else: - self.current_parameters = compiled_parameters = \ - self.compiled_parameters[0] - - for c in self.compiled.prefetch: - if self.isinsert: + prefetch = self.compiled.prefetch + scalar_defaults = {} + + # pre-determine scalar Python-side defaults + # to avoid many calls of get_insert_default()/ + # get_update_default() + for c in prefetch: + if self.isinsert and c.default and c.default.is_scalar: + scalar_defaults[c] = c.default.arg + elif self.isupdate and c.onupdate and c.onupdate.is_scalar: + scalar_defaults[c] = c.onupdate.arg + + for param in self.compiled_parameters: + self.current_parameters = param + for c in prefetch: + if c in scalar_defaults: + val = scalar_defaults[c] + elif self.isinsert: val = self.get_insert_default(c) else: val = self.get_update_default(c) - if val is not None: - compiled_parameters[key_getter(c)] = val - del self.current_parameters + param[key_getter(c)] = val + del self.current_parameters + + def _process_executesingle_defaults(self): + key_getter = self.compiled._key_getters_for_crud_column[2] + prefetch = self.compiled.prefetch + self.current_parameters = compiled_parameters = \ + self.compiled_parameters[0] + for c in prefetch: if self.isinsert: - self.inserted_primary_key = [ - self.compiled_parameters[0].get(key_getter(c), None) - for c in self.compiled.\ - statement.table.primary_key - ] + if c.default and \ + not c.default.is_sequence and c.default.is_scalar: + val = c.default.arg + else: + val = self.get_insert_default(c) + else: + val = self.get_update_default(c) + + if val is not None: + compiled_parameters[key_getter(c)] = val + del self.current_parameters DefaultDialect.execution_ctx_cls = 
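The split into _process_executemany_defaults() and _process_executesingle_defaults() above only reorganizes how Python-side column defaults are evaluated before execution; the visible behavior stays the same. A rough illustration, assuming an in-memory SQLite table with one scalar and one callable default:

    import datetime

    from sqlalchemy import (create_engine, Column, DateTime, Integer, MetaData,
                            String, Table)

    engine = create_engine("sqlite://")
    metadata = MetaData()
    events = Table("events", metadata,
                   Column("id", Integer, primary_key=True),
                   Column("kind", String(20), default="info"),                     # scalar default
                   Column("created", DateTime, default=datetime.datetime.utcnow))  # callable default
    metadata.create_all(engine)

    # executemany path: scalar defaults are computed once up front,
    # callable defaults once per parameter set
    engine.execute(events.insert(), [{"id": 1}, {"id": 2}, {"id": 3}])

    # single-statement path
    engine.execute(events.insert(), id=4)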
DefaultExecutionContext diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 7372258635..194834230f 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -1,5 +1,6 @@ # engine/interfaces.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -11,6 +12,7 @@ # backwards compat from ..sql.compiler import Compiled, TypeCompiler + class Dialect(object): """Define the behavior of a specific database and DB-API combination. @@ -148,11 +150,20 @@ class Dialect(object): This will prevent types.Boolean from generating a CHECK constraint when that type is used. + dbapi_exception_translation_map + A dictionary of names that will contain as values the names of + pep-249 exceptions ("IntegrityError", "OperationalError", etc) + keyed to alternate class names, to support the case where a + DBAPI has exception classes that aren't named as they are + referred to (e.g. IntegrityError = MyException). In the vast + majority of cases this dictionary is empty. + + .. versionadded:: 1.0.5 + """ _has_events = False - def create_connect_args(self, url): """Build DB-API compatible connection arguments. @@ -189,14 +200,15 @@ def initialize(self, connection): The connection passed here is a SQLAlchemy Connection object, with full capabilities. - The initalize() method of the base dialect should be called via + The initialize() method of the base dialect should be called via super(). """ pass - def reflecttable(self, connection, table, include_columns, exclude_columns): + def reflecttable( + self, connection, table, include_columns, exclude_columns): """Load table description from the database. Given a :class:`.Connection` and a @@ -240,7 +252,9 @@ def get_columns(self, connection, table_name, schema=None, **kw): sequence a dictionary of the form - {'name' : str, 'start' :int, 'increment': int} + {'name' : str, 'start' :int, 'increment': int, 'minvalue': int, + 'maxvalue': int, 'nominvalue': bool, 'nomaxvalue': bool, + 'cycle': bool} Additional column attributes may be present. """ @@ -253,7 +267,8 @@ def get_primary_keys(self, connection, table_name, schema=None, **kw): Deprecated. This method is only called by the default implementation of :meth:`.Dialect.get_pk_constraint`. Dialects should - instead implement the :meth:`.Dialect.get_pk_constraint` method directly. + instead implement the :meth:`.Dialect.get_pk_constraint` method + directly. """ @@ -305,7 +320,15 @@ def get_foreign_keys(self, connection, table_name, schema=None, **kw): def get_table_names(self, connection, schema=None, **kw): """Return a list of table names for `schema`.""" - raise NotImplementedError + raise NotImplementedError() + + def get_temp_table_names(self, connection, schema=None, **kw): + """Return a list of temporary table names on the given connection, + if supported by the underlying backend. + + """ + + raise NotImplementedError() def get_view_names(self, connection, schema=None, **kw): """Return a list of all view names available in the database. @@ -316,6 +339,14 @@ def get_view_names(self, connection, schema=None, **kw): raise NotImplementedError() + def get_temp_view_names(self, connection, schema=None, **kw): + """Return a list of temporary view names on the given connection, + if supported by the underlying backend. 
+ + """ + + raise NotImplementedError() + def get_view_definition(self, connection, view_name, schema=None, **kw): """Return view definition. @@ -345,7 +376,8 @@ def get_indexes(self, connection, table_name, schema=None, **kw): raise NotImplementedError() - def get_unique_constraints(self, connection, table_name, schema=None, **kw): + def get_unique_constraints( + self, connection, table_name, schema=None, **kw): """Return information about unique constraints in `table_name`. Given a string `table_name` and an optional string `schema`, return @@ -358,7 +390,8 @@ def get_unique_constraints(self, connection, table_name, schema=None, **kw): list of column names in order \**kw - other options passed to the dialect's get_unique_constraints() method. + other options passed to the dialect's get_unique_constraints() + method. .. versionadded:: 0.9.0 @@ -443,7 +476,7 @@ def do_begin(self, dbapi_connection): :meth:`.Dialect.do_autocommit` hook is provided for DBAPIs that need some extra commands emitted after a commit in order to enter the next transaction, when the - SQLAlchemy :class:`.Connection` is used in it's default "autocommit" + SQLAlchemy :class:`.Connection` is used in its default "autocommit" mode. :param dbapi_connection: a DBAPI connection, typically @@ -464,7 +497,6 @@ def do_rollback(self, dbapi_connection): raise NotImplementedError() - def do_commit(self, dbapi_connection): """Provide an implementation of ``connection.commit()``, given a DB-API connection. @@ -550,7 +582,7 @@ def do_prepare_twophase(self, connection, xid): raise NotImplementedError() def do_rollback_twophase(self, connection, xid, is_prepared=True, - recover=False): + recover=False): """Rollback a two phase transaction on the given connection. :param connection: a :class:`.Connection`. @@ -564,7 +596,7 @@ def do_rollback_twophase(self, connection, xid, is_prepared=True, raise NotImplementedError() def do_commit_twophase(self, connection, xid, is_prepared=True, - recover=False): + recover=False): """Commit a two phase transaction on the given connection. @@ -634,20 +666,120 @@ def connect(self): return None def reset_isolation_level(self, dbapi_conn): - """Given a DBAPI connection, revert its isolation to the default.""" + """Given a DBAPI connection, revert its isolation to the default. + + Note that this is a dialect-level method which is used as part + of the implementation of the :class:`.Connection` and + :class:`.Engine` + isolation level facilities; these APIs should be preferred for + most typical use cases. + + .. seealso:: + + :meth:`.Connection.get_isolation_level` - view current level + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.Connection.execution_options.isolation_level` - + set per :class:`.Connection` isolation level + + :paramref:`.create_engine.isolation_level` - + set per :class:`.Engine` isolation level + + """ raise NotImplementedError() def set_isolation_level(self, dbapi_conn, level): - """Given a DBAPI connection, set its isolation level.""" + """Given a DBAPI connection, set its isolation level. + + Note that this is a dialect-level method which is used as part + of the implementation of the :class:`.Connection` and + :class:`.Engine` + isolation level facilities; these APIs should be preferred for + most typical use cases. + + .. 
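The expanded docstrings here deliberately point users at the public isolation-level APIs rather than these dialect-level hooks. A minimal sketch of that public usage, assuming a PostgreSQL URL (credentials and database name are placeholders):

    from sqlalchemy import create_engine

    engine = create_engine("postgresql://scott:tiger@localhost/test")   # placeholder URL

    with engine.connect() as conn:
        # per-Connection isolation level; the dialect's set_isolation_level()
        # is invoked behind the scenes
        conn = conn.execution_options(isolation_level="SERIALIZABLE")
        print(conn.get_isolation_level())        # "SERIALIZABLE"
        print(conn.default_isolation_level)      # the engine-wide default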
seealso:: + + :meth:`.Connection.get_isolation_level` - view current level + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.Connection.execution_options.isolation_level` - + set per :class:`.Connection` isolation level + + :paramref:`.create_engine.isolation_level` - + set per :class:`.Engine` isolation level + + """ raise NotImplementedError() def get_isolation_level(self, dbapi_conn): - """Given a DBAPI connection, return its isolation level.""" + """Given a DBAPI connection, return its isolation level. + + When working with a :class:`.Connection` object, the corresponding + DBAPI connection may be procured using the + :attr:`.Connection.connection` accessor. + + Note that this is a dialect-level method which is used as part + of the implementation of the :class:`.Connection` and + :class:`.Engine` isolation level facilities; + these APIs should be preferred for most typical use cases. + + + .. seealso:: + + :meth:`.Connection.get_isolation_level` - view current level + + :attr:`.Connection.default_isolation_level` - view default level + + :paramref:`.Connection.execution_options.isolation_level` - + set per :class:`.Connection` isolation level + + :paramref:`.create_engine.isolation_level` - + set per :class:`.Engine` isolation level + + + """ raise NotImplementedError() + @classmethod + def get_dialect_cls(cls, url): + """Given a URL, return the :class:`.Dialect` that will be used. + + This is a hook that allows an external plugin to provide functionality + around an existing dialect, by allowing the plugin to be loaded + from the url based on an entrypoint, and then the plugin returns + the actual dialect to be used. + + By default this just returns the cls. + + .. versionadded:: 1.0.3 + + """ + return cls + + @classmethod + def engine_created(cls, engine): + """A convenience hook called before returning the final :class:`.Engine`. + + If the dialect returned a different class from the + :meth:`.get_dialect_cls` + method, then the hook is called on both classes, first on + the dialect class returned by the :meth:`.get_dialect_cls` method and + then on the class on which the method was called. + + The hook should be used by dialects and/or wrappers to apply special + events to the engine or its components. In particular, it allows + a dialect-wrapping class to apply dialect-level events. + + .. versionadded:: 1.0.3 + + """ + pass + class ExecutionContext(object): """A messenger object for a Dialect that corresponds to a single @@ -707,6 +839,40 @@ class ExecutionContext(object): and updates. """ + exception = None + """A DBAPI-level exception that was caught when this ExecutionContext + attempted to execute a statement. + + This attribute is meaningful only within the + :meth:`.ConnectionEvents.dbapi_error` event. + + .. versionadded:: 0.9.7 + + .. seealso:: + + :attr:`.ExecutionContext.is_disconnect` + + :meth:`.ConnectionEvents.dbapi_error` + + """ + + is_disconnect = None + """Boolean flag set to True or False when a DBAPI-level exception + is caught when this ExecutionContext attempted to execute a statement. + + This attribute is meaningful only within the + :meth:`.ConnectionEvents.dbapi_error` event. + + .. versionadded:: 0.9.7 + + .. seealso:: + + :attr:`.ExecutionContext.exception` + + :meth:`.ConnectionEvents.dbapi_error` + + """ + def create_cursor(self): """Return a new cursor generated from this ExecutionContext's connection. 
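get_dialect_cls() and engine_created() exist so that an external entrypoint can wrap an existing dialect. The following is a hypothetical sketch only; the wrapper class, its entrypoint name, and the logging it performs are all invented for illustration:

    from sqlalchemy import event
    from sqlalchemy.dialects.sqlite.pysqlite import SQLiteDialect_pysqlite

    class InstrumentedSQLiteDialect(SQLiteDialect_pysqlite):
        """Hypothetical wrapper dialect; a real plugin would be registered
        under an entrypoint such as "sqlite.instrumented"."""

        @classmethod
        def get_dialect_cls(cls, url):
            # a plugin may inspect the URL and hand back a different class
            return cls

        @classmethod
        def engine_created(cls, engine):
            # called once the Engine exists; a natural spot for dialect-level events
            @event.listens_for(engine, "before_cursor_execute")
            def _log(conn, cursor, statement, parameters, context, executemany):
                engine.logger.info("executing: %s", statement)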
@@ -842,8 +1008,145 @@ def scalar(self, object, *multiparams, **params): raise NotImplementedError() def _run_visitor(self, visitorcallable, element, - **kwargs): + **kwargs): raise NotImplementedError() def _execute_clauseelement(self, elem, multiparams=None, params=None): raise NotImplementedError() + + +class ExceptionContext(object): + """Encapsulate information about an error condition in progress. + + This object exists solely to be passed to the + :meth:`.ConnectionEvents.handle_error` event, supporting an interface that + can be extended without backwards-incompatibility. + + .. versionadded:: 0.9.7 + + """ + + connection = None + """The :class:`.Connection` in use during the exception. + + This member is present, except in the case of a failure when + first connecting. + + .. seealso:: + + :attr:`.ExceptionContext.engine` + + + """ + + engine = None + """The :class:`.Engine` in use during the exception. + + This member should always be present, even in the case of a failure + when first connecting. + + .. versionadded:: 1.0.0 + + """ + + cursor = None + """The DBAPI cursor object. + + May be None. + + """ + + statement = None + """String SQL statement that was emitted directly to the DBAPI. + + May be None. + + """ + + parameters = None + """Parameter collection that was emitted directly to the DBAPI. + + May be None. + + """ + + original_exception = None + """The exception object which was caught. + + This member is always present. + + """ + + sqlalchemy_exception = None + """The :class:`sqlalchemy.exc.StatementError` which wraps the original, + and will be raised if exception handling is not circumvented by the event. + + May be None, as not all exception types are wrapped by SQLAlchemy. + For DBAPI-level exceptions that subclass the dbapi's Error class, this + field will always be present. + + """ + + chained_exception = None + """The exception that was returned by the previous handler in the + exception chain, if any. + + If present, this exception will be the one ultimately raised by + SQLAlchemy unless a subsequent handler replaces it. + + May be None. + + """ + + execution_context = None + """The :class:`.ExecutionContext` corresponding to the execution + operation in progress. + + This is present for statement execution operations, but not for + operations such as transaction begin/end. It also is not present when + the exception was raised before the :class:`.ExecutionContext` + could be constructed. + + Note that the :attr:`.ExceptionContext.statement` and + :attr:`.ExceptionContext.parameters` members may represent a + different value than that of the :class:`.ExecutionContext`, + potentially in the case where a + :meth:`.ConnectionEvents.before_cursor_execute` event or similar + modified the statement/parameters to be sent. + + May be None. + + """ + + is_disconnect = None + """Represent whether the exception as occurred represents a "disconnect" + condition. + + This flag will always be True or False within the scope of the + :meth:`.ConnectionEvents.handle_error` handler. + + SQLAlchemy will defer to this flag in order to determine whether or not + the connection should be invalidated subsequently. That is, by + assigning to this flag, a "disconnect" event which then results in + a connection and pool invalidation can be invoked or prevented by + changing this flag. + + """ + + invalidate_pool_on_disconnect = True + """Represent whether all connections in the pool should be invalidated + when a "disconnect" condition is in effect. 
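The ExceptionContext members above are exactly what a ConnectionEvents.handle_error listener receives. A hedged sketch of consuming the two flags; the error-message test is purely illustrative and the URL is a placeholder:

    from sqlalchemy import create_engine, event

    engine = create_engine("postgresql://scott:tiger@localhost/test")   # placeholder URL

    @event.listens_for(engine, "handle_error")
    def _maybe_disconnect(context):
        # 'context' is the ExceptionContext documented above
        if "terminating connection" in str(context.original_exception):
            context.is_disconnect = True
            # invalidate only the failing connection, not every pooled connection
            context.invalidate_pool_on_disconnect = False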
+ + Setting this flag to False within the scope of the + :meth:`.ConnectionEvents.handle_error` event will have the effect such + that the full collection of connections in the pool will not be + invalidated during a disconnect; only the current connection that is the + subject of the error will actually be invalidated. + + The purpose of this flag is for custom disconnect-handling schemes where + the invalidation of other connections in the pool is to be performed + based on other conditions, or even on a per-connection basis. + + .. versionadded:: 1.0.3 + + """ diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 45f100518e..98fcfa0269 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -1,5 +1,6 @@ # engine/reflection.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -40,14 +41,14 @@ def cache(fn, self, con, *args, **kw): if info_cache is None: return fn(self, con, *args, **kw) key = ( - fn.__name__, - tuple(a for a in args if isinstance(a, util.string_types)), - tuple((k, v) for k, v in kw.items() if - isinstance(v, - util.string_types + util.int_types + (float, ) - ) - ) - ) + fn.__name__, + tuple(a for a in args if isinstance(a, util.string_types)), + tuple((k, v) for k, v in kw.items() if + isinstance(v, + util.string_types + util.int_types + (float, ) + ) + ) + ) ret = info_cache.get(key) if ret is None: ret = fn(self, con, *args, **kw) @@ -154,7 +155,7 @@ def get_schema_names(self): if hasattr(self.dialect, 'get_schema_names'): return self.dialect.get_schema_names(self.bind, - info_cache=self.info_cache) + info_cache=self.info_cache) return [] def get_table_names(self, schema=None, order_by=None): @@ -172,7 +173,14 @@ def get_table_names(self, schema=None, order_by=None): passed as ``None``. For special quoting, use :class:`.quoted_name`. :param order_by: Optional, may be the string "foreign_key" to sort - the result on foreign key dependencies. + the result on foreign key dependencies. Does not automatically + resolve cycles, and will raise :class:`.CircularDependencyError` + if cycles exist. + + .. deprecated:: 1.0.0 - see + :meth:`.Inspector.get_sorted_table_and_fkc_names` for a version + of this which resolves foreign key cycles between tables + automatically. .. versionchanged:: 0.8 the "foreign_key" sorting sorts tables in order of dependee to dependent; that is, in creation @@ -182,13 +190,15 @@ def get_table_names(self, schema=None, order_by=None): .. seealso:: + :meth:`.Inspector.get_sorted_table_and_fkc_names` + :attr:`.MetaData.sorted_tables` """ if hasattr(self.dialect, 'get_table_names'): - tnames = self.dialect.get_table_names(self.bind, - schema, info_cache=self.info_cache) + tnames = self.dialect.get_table_names( + self.bind, schema, info_cache=self.info_cache) else: tnames = self.engine.table_names(schema) if order_by == 'foreign_key': @@ -200,6 +210,88 @@ def get_table_names(self, schema=None, order_by=None): tnames = list(topological.sort(tuples, tnames)) return tnames + def get_sorted_table_and_fkc_names(self, schema=None): + """Return dependency-sorted table and foreign key constraint names in + referred to within a particular schema. 
+ + This will yield 2-tuples of + ``(tablename, [(tname, fkname), (tname, fkname), ...])`` + consisting of table names in CREATE order grouped with the foreign key + constraint names that are not detected as belonging to a cycle. + The final element + will be ``(None, [(tname, fkname), (tname, fkname), ..])`` + which will consist of remaining + foreign key constraint names that would require a separate CREATE + step after-the-fact, based on dependencies between tables. + + .. versionadded:: 1.0.- + + .. seealso:: + + :meth:`.Inspector.get_table_names` + + :func:`.sort_tables_and_constraints` - similar method which works + with an already-given :class:`.MetaData`. + + """ + if hasattr(self.dialect, 'get_table_names'): + tnames = self.dialect.get_table_names( + self.bind, schema, info_cache=self.info_cache) + else: + tnames = self.engine.table_names(schema) + + tuples = set() + remaining_fkcs = set() + + fknames_for_table = {} + for tname in tnames: + fkeys = self.get_foreign_keys(tname, schema) + fknames_for_table[tname] = set( + [fk['name'] for fk in fkeys] + ) + for fkey in fkeys: + if tname != fkey['referred_table']: + tuples.add((fkey['referred_table'], tname)) + try: + candidate_sort = list(topological.sort(tuples, tnames)) + except exc.CircularDependencyError as err: + for edge in err.edges: + tuples.remove(edge) + remaining_fkcs.update( + (edge[1], fkc) + for fkc in fknames_for_table[edge[1]] + ) + + candidate_sort = list(topological.sort(tuples, tnames)) + return [ + (tname, fknames_for_table[tname].difference(remaining_fkcs)) + for tname in candidate_sort + ] + [(None, list(remaining_fkcs))] + + def get_temp_table_names(self): + """return a list of temporary table names for the current bind. + + This method is unsupported by most dialects; currently + only SQLite implements it. + + .. versionadded:: 1.0.0 + + """ + return self.dialect.get_temp_table_names( + self.bind, info_cache=self.info_cache) + + def get_temp_view_names(self): + """return a list of temporary view names for the current bind. + + This method is unsupported by most dialects; currently + only SQLite implements it. + + .. versionadded:: 1.0.0 + + """ + return self.dialect.get_temp_view_names( + self.bind, info_cache=self.info_cache) + def get_table_options(self, table_name, schema=None, **kw): """Return a dictionary of options specified when the table of the given name was created. @@ -229,7 +321,7 @@ def get_view_names(self, schema=None): """ return self.dialect.get_view_names(self.bind, schema, - info_cache=self.info_cache) + info_cache=self.info_cache) def get_view_definition(self, view_name, schema=None): """Return definition for `view_name`. @@ -292,8 +384,8 @@ def get_primary_keys(self, table_name, schema=None, **kw): """ return self.dialect.get_pk_constraint(self.bind, table_name, schema, - info_cache=self.info_cache, - **kw)['constrained_columns'] + info_cache=self.info_cache, + **kw)['constrained_columns'] def get_pk_constraint(self, table_name, schema=None, **kw): """Return information about primary key constraint on `table_name`. @@ -351,8 +443,8 @@ def get_foreign_keys(self, table_name, schema=None, **kw): """ return self.dialect.get_foreign_keys(self.bind, table_name, schema, - info_cache=self.info_cache, - **kw) + info_cache=self.info_cache, + **kw) def get_indexes(self, table_name, schema=None, **kw): """Return information about indexes in `table_name`. @@ -369,6 +461,12 @@ def get_indexes(self, table_name, schema=None, **kw): unique boolean + dialect_options + dict of dialect-specific index options. 
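get_sorted_table_and_fkc_names() is the inspection-level counterpart of sort_tables_and_constraints(); it is useful when replaying an existing schema in dependency order. A minimal usage sketch, assuming an already-populated SQLite file named example.db:

    from sqlalchemy import create_engine, inspect

    engine = create_engine("sqlite:///example.db")      # assumed existing database
    inspector = inspect(engine)

    for table_name, fkcs in inspector.get_sorted_table_and_fkc_names():
        if table_name is not None:
            print("CREATE step:", table_name, "constraints:", list(fkcs))
        else:
            # constraints involved in a cycle, to be added in a separate step
            print("deferred constraints:", list(fkcs))

    # temp-table/view inspection is currently implemented by SQLite only
    print(inspector.get_temp_table_names())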
May not be present + for all dialects. + + .. versionadded:: 1.0.0 + :param table_name: string name of the table. For special quoting, use :class:`.quoted_name`. @@ -379,8 +477,8 @@ def get_indexes(self, table_name, schema=None, **kw): """ return self.dialect.get_indexes(self.bind, table_name, - schema, - info_cache=self.info_cache, **kw) + schema, + info_cache=self.info_cache, **kw) def get_unique_constraints(self, table_name, schema=None, **kw): """Return information about unique constraints in `table_name`. @@ -445,7 +543,8 @@ def reflecttable(self, table, include_columns, exclude_columns=()): ) # reflect table options, like mysql_engine - tbl_opts = self.get_table_options(table_name, schema, **table.dialect_kwargs) + tbl_opts = self.get_table_options( + table_name, schema, **table.dialect_kwargs) if tbl_opts: # add additional kwargs to the Table if the dialect # returned them @@ -460,58 +559,92 @@ def reflecttable(self, table, include_columns, exclude_columns=()): found_table = False cols_by_orig_name = {} - for col_d in self.get_columns(table_name, schema, **table.dialect_kwargs): + for col_d in self.get_columns( + table_name, schema, **table.dialect_kwargs): found_table = True - orig_name = col_d['name'] - table.dispatch.column_reflect(self, table, col_d) + self._reflect_column( + table, col_d, include_columns, + exclude_columns, cols_by_orig_name) - name = col_d['name'] - if include_columns and name not in include_columns: - continue - if exclude_columns and name in exclude_columns: - continue + if not found_table: + raise exc.NoSuchTableError(table.name) - coltype = col_d['type'] + self._reflect_pk( + table_name, schema, table, cols_by_orig_name, exclude_columns) - col_kw = dict( - (k, col_d[k]) - for k in ['nullable', 'autoincrement', 'quote', 'info', 'key'] - if k in col_d - ) + self._reflect_fk( + table_name, schema, table, cols_by_orig_name, + exclude_columns, reflection_options) - colargs = [] - if col_d.get('default') is not None: - # the "default" value is assumed to be a literal SQL - # expression, so is wrapped in text() so that no quoting - # occurs on re-issuance. - colargs.append( - sa_schema.DefaultClause( - sql.text(col_d['default']), _reflected=True - ) - ) + self._reflect_indexes( + table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options) - if 'sequence' in col_d: - # TODO: mssql and sybase are using this. 
- seq = col_d['sequence'] - sequence = sa_schema.Sequence(seq['name'], 1, 1) - if 'start' in seq: - sequence.start = seq['start'] - if 'increment' in seq: - sequence.increment = seq['increment'] - colargs.append(sequence) + self._reflect_unique_constraints( + table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options) - cols_by_orig_name[orig_name] = col = \ - sa_schema.Column(name, coltype, *colargs, **col_kw) + def _reflect_column( + self, table, col_d, include_columns, + exclude_columns, cols_by_orig_name): - if col.key in table.primary_key: - col.primary_key = True - table.append_column(col) + orig_name = col_d['name'] - if not found_table: - raise exc.NoSuchTableError(table.name) + table.dispatch.column_reflect(self, table, col_d) + + # fetch name again as column_reflect is allowed to + # change it + name = col_d['name'] + if (include_columns and name not in include_columns) \ + or (exclude_columns and name in exclude_columns): + return - pk_cons = self.get_pk_constraint(table_name, schema, **table.dialect_kwargs) + coltype = col_d['type'] + + col_kw = dict( + (k, col_d[k]) + for k in ['nullable', 'autoincrement', 'quote', 'info', 'key'] + if k in col_d + ) + + colargs = [] + if col_d.get('default') is not None: + # the "default" value is assumed to be a literal SQL + # expression, so is wrapped in text() so that no quoting + # occurs on re-issuance. + colargs.append( + sa_schema.DefaultClause( + sql.text(col_d['default']), _reflected=True + ) + ) + + if 'sequence' in col_d: + self._reflect_col_sequence(col_d, colargs) + + cols_by_orig_name[orig_name] = col = \ + sa_schema.Column(name, coltype, *colargs, **col_kw) + + if col.key in table.primary_key: + col.primary_key = True + table.append_column(col) + + def _reflect_col_sequence(self, col_d, colargs): + if 'sequence' in col_d: + # TODO: mssql and sybase are using this. 
+ seq = col_d['sequence'] + sequence = sa_schema.Sequence(seq['name'], 1, 1) + if 'start' in seq: + sequence.start = seq['start'] + if 'increment' in seq: + sequence.increment = seq['increment'] + colargs.append(sequence) + + def _reflect_pk( + self, table_name, schema, table, + cols_by_orig_name, exclude_columns): + pk_cons = self.get_pk_constraint( + table_name, schema, **table.dialect_kwargs) if pk_cons: pk_cols = [ cols_by_orig_name[pk] @@ -523,21 +656,25 @@ def reflecttable(self, table, include_columns, exclude_columns=()): table.primary_key.name = pk_cons.get('name') # tell the PKConstraint to re-initialize - # it's column collection + # its column collection table.primary_key._reload(pk_cols) - fkeys = self.get_foreign_keys(table_name, schema, **table.dialect_kwargs) + def _reflect_fk( + self, table_name, schema, table, cols_by_orig_name, + exclude_columns, reflection_options): + fkeys = self.get_foreign_keys( + table_name, schema, **table.dialect_kwargs) for fkey_d in fkeys: conname = fkey_d['name'] # look for columns by orig name in cols_by_orig_name, # but support columns that are in-Python only as fallback constrained_columns = [ - cols_by_orig_name[c].key - if c in cols_by_orig_name else c - for c in fkey_d['constrained_columns'] - ] + cols_by_orig_name[c].key + if c in cols_by_orig_name else c + for c in fkey_d['constrained_columns'] + ] if exclude_columns and set(constrained_columns).intersection( - exclude_columns): + exclude_columns): continue referred_schema = fkey_d['referred_schema'] referred_table = fkey_d['referred_table'] @@ -567,24 +704,85 @@ def reflecttable(self, table, include_columns, exclude_columns=()): sa_schema.ForeignKeyConstraint(constrained_columns, refspec, conname, link_to_name=True, **options)) + + def _reflect_indexes( + self, table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options): # Indexes indexes = self.get_indexes(table_name, schema) for index_d in indexes: name = index_d['name'] columns = index_d['column_names'] unique = index_d['unique'] - flavor = index_d.get('type', 'unknown type') + flavor = index_d.get('type', 'index') + dialect_options = index_d.get('dialect_options', {}) + + duplicates = index_d.get('duplicates_constraint') if include_columns and \ - not set(columns).issubset(include_columns): + not set(columns).issubset(include_columns): util.warn( - "Omitting %s KEY for (%s), key covers omitted columns." % + "Omitting %s key for (%s), key covers omitted columns." 
% (flavor, ', '.join(columns))) continue + if duplicates: + continue # look for columns by orig name in cols_by_orig_name, # but support columns that are in-Python only as fallback - sa_schema.Index(name, *[ - cols_by_orig_name[c] if c in cols_by_orig_name - else table.c[c] - for c in columns - ], - **dict(unique=unique)) + idx_cols = [] + for c in columns: + try: + idx_col = cols_by_orig_name[c] \ + if c in cols_by_orig_name else table.c[c] + except KeyError: + util.warn( + "%s key '%s' was not located in " + "columns for table '%s'" % ( + flavor, c, table_name + )) + else: + idx_cols.append(idx_col) + + sa_schema.Index( + name, *idx_cols, + **dict(list(dialect_options.items()) + [('unique', unique)]) + ) + + def _reflect_unique_constraints( + self, table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options): + + # Unique Constraints + try: + constraints = self.get_unique_constraints(table_name, schema) + except NotImplementedError: + # optional dialect feature + return + + for const_d in constraints: + conname = const_d['name'] + columns = const_d['column_names'] + duplicates = const_d.get('duplicates_index') + if include_columns and \ + not set(columns).issubset(include_columns): + util.warn( + "Omitting unique constraint key for (%s), " + "key covers omitted columns." % + ', '.join(columns)) + continue + if duplicates: + continue + # look for columns by orig name in cols_by_orig_name, + # but support columns that are in-Python only as fallback + constrained_cols = [] + for c in columns: + try: + constrained_col = cols_by_orig_name[c] \ + if c in cols_by_orig_name else table.c[c] + except KeyError: + util.warn( + "unique constraint key '%s' was not located in " + "columns for table '%s'" % (c, table_name)) + else: + constrained_cols.append(constrained_col) + table.append_constraint( + sa_schema.UniqueConstraint(*constrained_cols, name=conname)) diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 6c98dae186..4805015948 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -1,5 +1,6 @@ # engine/result.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,7 +9,6 @@ and :class:`.RowProxy.""" - from .. import exc, util from ..sql import expression, sqltypes import collections @@ -74,7 +74,7 @@ def __getitem__(self, key): if isinstance(key, slice): l = [] for processor, value in zip(self._processors[key], - self._row[key]): + self._row[key]): if processor is None: l.append(value) else: @@ -84,8 +84,8 @@ def __getitem__(self, key): raise if index is None: raise exc.InvalidRequestError( - "Ambiguous column name '%s' in result set! " - "try 'use_labels' option on select statement." % key) + "Ambiguous column name '%s' in result set! " + "try 'use_labels' option on select statement." 
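Note that _reflect_column() dispatches the public column_reflect event before applying the include/exclude filters, so listeners can still rename or re-type columns during reflection. A small sketch, assuming an existing table named stuff with a column the listener wants to coerce (both names are hypothetical):

    from sqlalchemy import create_engine, event, MetaData, String, Table

    engine = create_engine("sqlite:///example.db")      # assumed existing database
    metadata = MetaData()

    @event.listens_for(Table, "column_reflect")
    def _coerce_column(inspector, table, column_info):
        # column_info is the col_d dictionary handled by _reflect_column()
        if column_info["name"] == "serialized_blob":     # hypothetical column name
            column_info["type"] = String()

    stuff = Table("stuff", metadata, autoload=True, autoload_with=engine)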
% key) if processor is not None: return processor(self._row[index]) else: @@ -110,7 +110,7 @@ class RowProxy(BaseRowProxy): __slots__ = () def __contains__(self, key): - return self._parent._has_key(self._row, key) + return self._parent._has_key(key) def __getstate__(self): return { @@ -155,7 +155,7 @@ def __repr__(self): def has_key(self, key): """Return True if this RowProxy contains the given key.""" - return self._parent._has_key(self._row, key) + return self._parent._has_key(key) def items(self): """Return a list of tuples, each tuple containing a key/value pair.""" @@ -187,94 +187,170 @@ class ResultMetaData(object): context.""" def __init__(self, parent, metadata): - self._processors = processors = [] - - # We do not strictly need to store the processor in the key mapping, - # though it is faster in the Python version (probably because of the - # saved attribute lookup self._processors) - self._keymap = keymap = {} - self.keys = [] context = parent.context dialect = context.dialect typemap = dialect.dbapi_type_map translate_colname = context._translate_colname - self.case_sensitive = dialect.case_sensitive - - # high precedence key values. - primary_keymap = {} - - for i, rec in enumerate(metadata): - colname = rec[0] - coltype = rec[1] - - if dialect.description_encoding: - colname = dialect._description_decoder(colname) + self.case_sensitive = case_sensitive = dialect.case_sensitive + self._orig_processors = None + if context.result_column_struct: + result_columns, cols_are_ordered = context.result_column_struct + num_ctx_cols = len(result_columns) + else: + num_ctx_cols = None + + if num_ctx_cols and \ + cols_are_ordered and \ + num_ctx_cols == len(metadata): + # case 1 - SQL expression statement, number of columns + # in result matches number of cols in compiled. This is the + # vast majority case for SQL expression constructs. In this + # case we don't bother trying to parse or match up to + # the colnames in the result description. + raw = [ + ( + idx, + key, + name.lower() if not case_sensitive else name, + context.get_result_processor( + type_, key, metadata[idx][1] + ), + obj, + None + ) for idx, (key, name, obj, type_) + in enumerate(result_columns) + ] + self.keys = [ + elem[0] for elem in result_columns + ] + else: + # case 2 - raw string, or number of columns in result does + # not match number of cols in compiled. The raw string case + # is very common. The latter can happen + # when text() is used with only a partial typemap, or + # in the extremely unlikely cases where the compiled construct + # has a single element with multiple col expressions in it + # (e.g. has commas embedded) or there's some kind of statement + # that is adding extra columns. + # In all these cases we fall back to the "named" approach + # that SQLAlchemy has used up through 0.9. 
+ + if num_ctx_cols: + result_map = self._create_result_map( + result_columns, case_sensitive) + + raw = [] + self.keys = [] + untranslated = None + for idx, rec in enumerate(metadata): + colname = rec[0] + coltype = rec[1] + + if dialect.description_encoding: + colname = dialect._description_decoder(colname) + + if translate_colname: + colname, untranslated = translate_colname(colname) + + if dialect.requires_name_normalize: + colname = dialect.normalize_name(colname) + + self.keys.append(colname) + if not case_sensitive: + colname = colname.lower() + + if num_ctx_cols: + try: + ctx_rec = result_map[colname] + except KeyError: + mapped_type = typemap.get(coltype, sqltypes.NULLTYPE) + obj = None + else: + obj = ctx_rec[1] + mapped_type = ctx_rec[2] + else: + mapped_type = typemap.get(coltype, sqltypes.NULLTYPE) + obj = None + processor = context.get_result_processor( + mapped_type, colname, coltype) + + raw.append( + (idx, colname, colname, processor, obj, untranslated) + ) + + # keymap indexes by integer index... + self._keymap = dict([ + (elem[0], (elem[3], elem[4], elem[0])) + for elem in raw + ]) + + # processors in key order for certain per-row + # views like __iter__ and slices + self._processors = [elem[3] for elem in raw] + + if num_ctx_cols: + # keymap by primary string... + by_key = dict([ + (elem[2], (elem[3], elem[4], elem[0])) + for elem in raw + ]) + + # if by-primary-string dictionary smaller (or bigger?!) than + # number of columns, assume we have dupes, rewrite + # dupe records with "None" for index which results in + # ambiguous column exception when accessed. + if len(by_key) != num_ctx_cols: + seen = set() + for rec in raw: + key = rec[1] + if key in seen: + key = key.lower() if not self.case_sensitive else key + by_key[key] = (None, by_key[key][1], None) + seen.add(key) + + # update keymap with secondary "object"-based keys + self._keymap.update([ + (obj_elem, by_key[elem[2]]) + for elem in raw if elem[4] + for obj_elem in elem[4] + ]) + + # update keymap with primary string names taking + # precedence + self._keymap.update(by_key) + else: + self._keymap.update([ + (elem[2], (elem[3], elem[4], elem[0])) + for elem in raw + ]) + # update keymap with "translated" names (sqlite-only thing) if translate_colname: - colname, untranslated = translate_colname(colname) - - if dialect.requires_name_normalize: - colname = dialect.normalize_name(colname) - - if context.result_map: - try: - name, obj, type_ = context.result_map[colname - if self.case_sensitive - else colname.lower()] - except KeyError: - name, obj, type_ = \ - colname, None, typemap.get(coltype, sqltypes.NULLTYPE) + self._keymap.update([ + (elem[5], self._keymap[elem[2]]) + for elem in raw if elem[5] + ]) + + @classmethod + def _create_result_map(cls, result_columns, case_sensitive=True): + d = {} + for elem in result_columns: + key, rec = elem[0], elem[1:] + if not case_sensitive: + key = key.lower() + if key in d: + # conflicting keyname, just double up the list + # of objects. this will cause an "ambiguous name" + # error if an attempt is made by the result set to + # access. + e_name, e_obj, e_type = d[key] + d[key] = e_name, e_obj + rec[1], e_type else: - name, obj, type_ = \ - colname, None, typemap.get(coltype, sqltypes.NULLTYPE) - - processor = context.get_result_processor(type_, colname, coltype) - - processors.append(processor) - rec = (processor, obj, i) - - # indexes as keys. This is only needed for the Python version of - # RowProxy (the C version uses a faster path for integer indexes). 
- primary_keymap[i] = rec - - # populate primary keymap, looking for conflicts. - if primary_keymap.setdefault( - name if self.case_sensitive - else name.lower(), - rec) is not rec: - # place a record that doesn't have the "index" - this - # is interpreted later as an AmbiguousColumnError, - # but only when actually accessed. Columns - # colliding by name is not a problem if those names - # aren't used; integer access is always - # unambiguous. - primary_keymap[name - if self.case_sensitive - else name.lower()] = rec = (None, obj, None) - - self.keys.append(colname) - if obj: - for o in obj: - keymap[o] = rec - # technically we should be doing this but we - # are saving on callcounts by not doing so. - # if keymap.setdefault(o, rec) is not rec: - # keymap[o] = (None, obj, None) - - if translate_colname and \ - untranslated: - keymap[untranslated] = rec - - # overwrite keymap values with those of the - # high precedence keymap. - keymap.update(primary_keymap) - - if parent._echo: - context.engine.logger.debug( - "Col %r", tuple(x[0] for x in metadata)) + d[key] = rec + return d @util.pending_deprecation("0.8", "sqlite dialect uses " - "_translate_colname() now") + "_translate_colname() now") def _set_keymap_synonym(self, name, origname): """Set a synonym for the given name. @@ -284,8 +360,8 @@ def _set_keymap_synonym(self, name, origname): """ rec = (processor, obj, i) = self._keymap[origname if - self.case_sensitive - else origname.lower()] + self.case_sensitive + else origname.lower()] if self._keymap.setdefault(name, rec) is not rec: self._keymap[name] = (processor, obj, None) @@ -300,26 +376,26 @@ def _key_fallback(self, key, raiseerr=True): # pickle/unpickle roundtrip elif isinstance(key, expression.ColumnElement): if key._label and ( - key._label - if self.case_sensitive - else key._label.lower()) in map: + key._label + if self.case_sensitive + else key._label.lower()) in map: result = map[key._label - if self.case_sensitive - else key._label.lower()] + if self.case_sensitive + else key._label.lower()] elif hasattr(key, 'name') and ( - key.name - if self.case_sensitive - else key.name.lower()) in map: + key.name + if self.case_sensitive + else key.name.lower()) in map: # match is only on name. result = map[key.name - if self.case_sensitive - else key.name.lower()] + if self.case_sensitive + else key.name.lower()] # search extra hard to make sure this # isn't a column/label name overlap. # this check isn't currently available if the row # was unpickled. if result is not None and \ - result[1] is not None: + result[1] is not None: for obj in result[1]: if key._compare_name_for_result(obj): break @@ -329,19 +405,35 @@ def _key_fallback(self, key, raiseerr=True): if raiseerr: raise exc.NoSuchColumnError( "Could not locate column in row for column '%s'" % - expression._string_or_unprintable(key)) + expression._string_or_unprintable(key)) else: return None else: map[key] = result return result - def _has_key(self, row, key): + def _has_key(self, key): if key in self._keymap: return True else: return self._key_fallback(key, False) is not None + def _getter(self, key, raiseerr=True): + if key in self._keymap: + processor, obj, index = self._keymap[key] + else: + ret = self._key_fallback(key, raiseerr) + if ret is None: + return None + processor, obj, index = ret + + if index is None: + raise exc.InvalidRequestError( + "Ambiguous column name '%s' in result set! " + "try 'use_labels' option on select statement." 
% key) + + return operator.itemgetter(index) + def __getstate__(self): return { '_pickled_keymap': dict( @@ -392,21 +484,49 @@ class ResultProxy(object): out_parameters = None _can_close_connection = False _metadata = None + _soft_closed = False + closed = False def __init__(self, context): self.context = context self.dialect = context.dialect - self.closed = False self.cursor = self._saved_cursor = context.cursor self.connection = context.root_connection self._echo = self.connection._echo and \ - context.engine._should_log_debug() + context.engine._should_log_debug() self._init_metadata() + def _getter(self, key, raiseerr=True): + try: + getter = self._metadata._getter + except AttributeError: + return self._non_result(None) + else: + return getter(key, raiseerr) + + def _has_key(self, key): + try: + has_key = self._metadata._has_key + except AttributeError: + return self._non_result(None) + else: + return has_key(key) + def _init_metadata(self): metadata = self._cursor_description() if metadata is not None: - self._metadata = ResultMetaData(self, metadata) + if self.context.compiled and \ + 'compiled_cache' in self.context.execution_options: + if self.context.compiled._cached_metadata: + self._metadata = self.context.compiled._cached_metadata + else: + self._metadata = self.context.compiled._cached_metadata = \ + ResultMetaData(self, metadata) + else: + self._metadata = ResultMetaData(self, metadata) + if self._echo: + self.context.engine.logger.debug( + "Col %r", tuple(x[0] for x in metadata)) def keys(self): """Return the current set of string keys for rows.""" @@ -460,7 +580,7 @@ def rowcount(self): return self.context.rowcount except Exception as e: self.connection._handle_dbapi_exception( - e, None, None, self.cursor, self.context) + e, None, None, self.cursor, self.context) @property def lastrowid(self): @@ -482,8 +602,8 @@ def lastrowid(self): return self._saved_cursor.lastrowid except Exception as e: self.connection._handle_dbapi_exception( - e, None, None, - self._saved_cursor, self.context) + e, None, None, + self._saved_cursor, self.context) @property def returns_rows(self): @@ -516,39 +636,85 @@ def _cursor_description(self): return self._saved_cursor.description - def close(self, _autoclose_connection=True): - """Close this ResultProxy. - - Closes the underlying DBAPI cursor corresponding to the execution. - - Note that any data cached within this ResultProxy is still available. - For some types of results, this may include buffered rows. + def _soft_close(self, _autoclose_connection=True): + """Soft close this :class:`.ResultProxy`. - If this ResultProxy was generated from an implicit execution, - the underlying Connection will also be closed (returns the - underlying DBAPI connection to the connection pool.) + This releases all DBAPI cursor resources, but leaves the + ResultProxy "open" from a semantic perspective, meaning the + fetchXXX() methods will continue to return empty results. This method is called automatically when: * all result rows are exhausted using the fetchXXX() methods. * cursor.description is None. + This method is **not public**, but is documented in order to clarify + the "autoclose" process used. + + .. versionadded:: 1.0.0 + + .. 
seealso:: + + :meth:`.ResultProxy.close` + + + """ + if self._soft_closed: + return + self._soft_closed = True + cursor = self.cursor + self.connection._safe_close_cursor(cursor) + if _autoclose_connection and \ + self.connection.should_close_with_result: + self.connection.close() + self.cursor = None + + def close(self): + """Close this ResultProxy. + + This closes out the underlying DBAPI cursor corresonding + to the statement execution, if one is stil present. Note that the + DBAPI cursor is automatically released when the :class:`.ResultProxy` + exhausts all available rows. :meth:`.ResultProxy.close` is generally + an optional method except in the case when discarding a + :class:`.ResultProxy` that still has additional rows pending for fetch. + + In the case of a result that is the product of + :ref:`connectionless execution `, + the underyling :class:`.Connection` object is also closed, which + :term:`releases` DBAPI connection resources. + + After this method is called, it is no longer valid to call upon + the fetch methods, which will raise a :class:`.ResourceClosedError` + on subsequent use. + + .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method + has been separated out from the process that releases the underlying + DBAPI cursor resource. The "auto close" feature of the + :class:`.Connection` now performs a so-called "soft close", which + releases the underlying DBAPI cursor, but allows the + :class:`.ResultProxy` to still behave as an open-but-exhausted + result set; the actual :meth:`.ResultProxy.close` method is never + called. It is still safe to discard a :class:`.ResultProxy` + that has been fully exhausted without calling this method. + + .. seealso:: + + :ref:`connections_toplevel` + + :meth:`.ResultProxy._soft_close` + """ if not self.closed: + self._soft_close() self.closed = True - self.connection._safe_close_cursor(self.cursor) - if _autoclose_connection and \ - self.connection.should_close_with_result: - self.connection.close() - # allow consistent errors - self.cursor = None def __iter__(self): while True: row = self.fetchone() if row is None: - raise StopIteration + return else: yield row @@ -581,17 +747,17 @@ def inserted_primary_key(self): if not self.context.compiled: raise exc.InvalidRequestError( - "Statement is not a compiled " - "expression construct.") + "Statement is not a compiled " + "expression construct.") elif not self.context.isinsert: raise exc.InvalidRequestError( - "Statement is not an insert() " - "expression construct.") + "Statement is not an insert() " + "expression construct.") elif self.context._is_explicit_returning: raise exc.InvalidRequestError( - "Can't call inserted_primary_key " - "when returning() " - "is used.") + "Can't call inserted_primary_key " + "when returning() " + "is used.") return self.context.inserted_primary_key @@ -606,12 +772,12 @@ def last_updated_params(self): """ if not self.context.compiled: raise exc.InvalidRequestError( - "Statement is not a compiled " - "expression construct.") + "Statement is not a compiled " + "expression construct.") elif not self.context.isupdate: raise exc.InvalidRequestError( - "Statement is not an update() " - "expression construct.") + "Statement is not an update() " + "expression construct.") elif self.context.executemany: return self.context.compiled_parameters else: @@ -628,12 +794,12 @@ def last_inserted_params(self): """ if not self.context.compiled: raise exc.InvalidRequestError( - "Statement is not a compiled " - "expression construct.") + "Statement is not a compiled 
" + "expression construct.") elif not self.context.isinsert: raise exc.InvalidRequestError( - "Statement is not an insert() " - "expression construct.") + "Statement is not an insert() " + "expression construct.") elif self.context.executemany: return self.context.compiled_parameters else: @@ -681,12 +847,12 @@ def postfetch_cols(self): if not self.context.compiled: raise exc.InvalidRequestError( - "Statement is not a compiled " - "expression construct.") + "Statement is not a compiled " + "expression construct.") elif not self.context.isinsert and not self.context.isupdate: raise exc.InvalidRequestError( - "Statement is not an insert() or update() " - "expression construct.") + "Statement is not an insert() or update() " + "expression construct.") return self.context.postfetch_cols def prefetch_cols(self): @@ -703,12 +869,12 @@ def prefetch_cols(self): if not self.context.compiled: raise exc.InvalidRequestError( - "Statement is not a compiled " - "expression construct.") + "Statement is not a compiled " + "expression construct.") elif not self.context.isinsert and not self.context.isupdate: raise exc.InvalidRequestError( - "Statement is not an insert() or update() " - "expression construct.") + "Statement is not an insert() or update() " + "expression construct.") return self.context.prefetch_cols def supports_sane_rowcount(self): @@ -733,7 +899,7 @@ def _fetchone_impl(self): try: return self.cursor.fetchone() except AttributeError: - self._non_result() + return self._non_result(None) def _fetchmany_impl(self, size=None): try: @@ -742,22 +908,24 @@ def _fetchmany_impl(self, size=None): else: return self.cursor.fetchmany(size) except AttributeError: - self._non_result() + return self._non_result([]) def _fetchall_impl(self): try: return self.cursor.fetchall() except AttributeError: - self._non_result() + return self._non_result([]) - def _non_result(self): + def _non_result(self, default): if self._metadata is None: raise exc.ResourceClosedError( - "This result object does not return rows. " - "It has been closed automatically.", + "This result object does not return rows. " + "It has been closed automatically.", ) - else: + elif self.closed: raise exc.ResourceClosedError("This result object is closed.") + else: + return default def process_rows(self, rows): process_row = self._process_row @@ -776,41 +944,75 @@ def process_rows(self, rows): for row in rows] def fetchall(self): - """Fetch all rows, just like DB-API ``cursor.fetchall()``.""" + """Fetch all rows, just like DB-API ``cursor.fetchall()``. + + After all rows have been exhausted, the underlying DBAPI + cursor resource is released, and the object may be safely + discarded. + + Subsequent calls to :meth:`.ResultProxy.fetchall` will return + an empty list. After the :meth:`.ResultProxy.close` method is + called, the method will raise :class:`.ResourceClosedError`. + + .. versionchanged:: 1.0.0 - Added "soft close" behavior which + allows the result to be used in an "exhausted" state prior to + calling the :meth:`.ResultProxy.close` method. + + """ try: l = self.process_rows(self._fetchall_impl()) - self.close() + self._soft_close() return l except Exception as e: self.connection._handle_dbapi_exception( - e, None, None, - self.cursor, self.context) + e, None, None, + self.cursor, self.context) def fetchmany(self, size=None): """Fetch many rows, just like DB-API ``cursor.fetchmany(size=cursor.arraysize)``. - If rows are present, the cursor remains open after this is called. 
- Else the cursor is automatically closed and an empty list is returned. + After all rows have been exhausted, the underlying DBAPI + cursor resource is released, and the object may be safely + discarded. + + Calls to :meth:`.ResultProxy.fetchmany` after all rows have been + exhuasted will return + an empty list. After the :meth:`.ResultProxy.close` method is + called, the method will raise :class:`.ResourceClosedError`. + + .. versionchanged:: 1.0.0 - Added "soft close" behavior which + allows the result to be used in an "exhausted" state prior to + calling the :meth:`.ResultProxy.close` method. """ try: l = self.process_rows(self._fetchmany_impl(size)) if len(l) == 0: - self.close() + self._soft_close() return l except Exception as e: self.connection._handle_dbapi_exception( - e, None, None, - self.cursor, self.context) + e, None, None, + self.cursor, self.context) def fetchone(self): """Fetch one row, just like DB-API ``cursor.fetchone()``. - If a row is present, the cursor remains open after this is called. - Else the cursor is automatically closed and None is returned. + After all rows have been exhausted, the underlying DBAPI + cursor resource is released, and the object may be safely + discarded. + + Calls to :meth:`.ResultProxy.fetchone` after all rows have + been exhausted will return ``None``. + After the :meth:`.ResultProxy.close` method is + called, the method will raise :class:`.ResourceClosedError`. + + .. versionchanged:: 1.0.0 - Added "soft close" behavior which + allows the result to be used in an "exhausted" state prior to + calling the :meth:`.ResultProxy.close` method. """ try: @@ -818,28 +1020,31 @@ def fetchone(self): if row is not None: return self.process_rows([row])[0] else: - self.close() + self._soft_close() return None except Exception as e: self.connection._handle_dbapi_exception( - e, None, None, - self.cursor, self.context) + e, None, None, + self.cursor, self.context) def first(self): """Fetch the first row and then close the result set unconditionally. Returns None if no row is present. + After calling this method, the object is fully closed, + e.g. the :meth:`.ResultProxy.close` method will have been called. + """ if self._metadata is None: - self._non_result() + return self._non_result(None) try: row = self._fetchone_impl() except Exception as e: self.connection._handle_dbapi_exception( - e, None, None, - self.cursor, self.context) + e, None, None, + self.cursor, self.context) try: if row is not None: @@ -854,6 +1059,9 @@ def scalar(self): Returns None if no row is present. + After calling this method, the object is fully closed, + e.g. the :meth:`.ResultProxy.close` method will have been called. + """ row = self.first() if row is not None: @@ -874,10 +1082,27 @@ class BufferedRowResultProxy(ResultProxy): The pre-fetching behavior fetches only one row initially, and then grows its buffer size by a fixed amount with each successive need - for additional rows up to a size of 100. + for additional rows up to a size of 1000. + + The size argument is configurable using the ``max_row_buffer`` + execution option:: + + with psycopg2_engine.connect() as conn: + + result = conn.execution_options( + stream_results=True, max_row_buffer=50 + ).execute("select * from table") + + .. versionadded:: 1.0.6 Added the ``max_row_buffer`` option. + + .. 
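The 1.0 "soft close" split described in these docstrings means a fully fetched result keeps returning empty values, while an explicitly closed one raises. A minimal sketch against an in-memory SQLite connection:

    from sqlalchemy import create_engine
    from sqlalchemy.exc import ResourceClosedError

    engine = create_engine("sqlite://")

    with engine.connect() as conn:
        result = conn.execute("SELECT 1 AS x UNION ALL SELECT 2")
        print(result.fetchall())      # [(1,), (2,)]; cursor is now soft-closed
        print(result.fetchone())      # None: exhausted but still usable

        result.close()                # hard close
        try:
            result.fetchone()
        except ResourceClosedError as err:
            print("closed:", err)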
seealso:: + + :ref:`psycopg2_execution_options` """ def _init_metadata(self): + self._max_row_buffer = self.context.execution_options.get( + 'max_row_buffer', None) self.__buffer_rows() super(BufferedRowResultProxy, self)._init_metadata() @@ -897,13 +1122,21 @@ def _init_metadata(self): } def __buffer_rows(self): + if self.cursor is None: + return size = getattr(self, '_bufsize', 1) self.__rowbuffer = collections.deque(self.cursor.fetchmany(size)) self._bufsize = self.size_growth.get(size, size) + if self._max_row_buffer is not None: + self._bufsize = min(self._max_row_buffer, self._bufsize) + + def _soft_close(self, **kw): + self.__rowbuffer.clear() + super(BufferedRowResultProxy, self)._soft_close(**kw) def _fetchone_impl(self): - if self.closed: - return None + if self.cursor is None: + return self._non_result(None) if not self.__rowbuffer: self.__buffer_rows() if not self.__rowbuffer: @@ -922,6 +1155,8 @@ def _fetchmany_impl(self, size=None): return result def _fetchall_impl(self): + if self.cursor is None: + return self._non_result([]) self.__rowbuffer.extend(self.cursor.fetchall()) ret = self.__rowbuffer self.__rowbuffer = collections.deque() @@ -936,6 +1171,7 @@ class FullyBufferedResultProxy(ResultProxy): such as MSSQL INSERT...OUTPUT after an autocommit. """ + def _init_metadata(self): super(FullyBufferedResultProxy, self)._init_metadata() self.__rowbuffer = self._buffer_rows() @@ -943,11 +1179,15 @@ def _init_metadata(self): def _buffer_rows(self): return collections.deque(self.cursor.fetchall()) + def _soft_close(self, **kw): + self.__rowbuffer.clear() + super(FullyBufferedResultProxy, self)._soft_close(**kw) + def _fetchone_impl(self): if self.__rowbuffer: return self.__rowbuffer.popleft() else: - return None + return self._non_result(None) def _fetchmany_impl(self, size=None): if size is None: @@ -961,6 +1201,8 @@ def _fetchmany_impl(self, size=None): return result def _fetchall_impl(self): + if not self.cursor: + return self._non_result([]) ret = self.__rowbuffer self.__rowbuffer = collections.deque() return ret @@ -997,16 +1239,21 @@ class BufferedColumnResultProxy(ResultProxy): def _init_metadata(self): super(BufferedColumnResultProxy, self)._init_metadata() + metadata = self._metadata - # orig_processors will be used to preprocess each row when they are - # constructed. - metadata._orig_processors = metadata._processors - # replace the all type processors by None processors. - metadata._processors = [None for _ in range(len(metadata.keys))] - keymap = {} - for k, (func, obj, index) in metadata._keymap.items(): - keymap[k] = (None, obj, index) - self._metadata._keymap = keymap + + # don't double-replace the processors, in the case + # of a cached ResultMetaData + if metadata._orig_processors is None: + # orig_processors will be used to preprocess each row when + # they are constructed. + metadata._orig_processors = metadata._processors + # replace the all type processors by None processors. 
+ metadata._processors = [None for _ in range(len(metadata.keys))] + keymap = {} + for k, (func, obj, index) in metadata._keymap.items(): + keymap[k] = (None, obj, index) + metadata._keymap = keymap def fetchall(self): # can't call cursor.fetchall(), since rows must be diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py index a8a63bb3d5..2a018f8b16 100644 --- a/lib/sqlalchemy/engine/strategies.py +++ b/lib/sqlalchemy/engine/strategies.py @@ -1,5 +1,6 @@ # engine/strategies.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -47,7 +48,8 @@ def create(self, name_or_url, **kwargs): # create url.URL object u = url.make_url(name_or_url) - dialect_cls = u.get_dialect() + entrypoint = u._get_entrypoint() + dialect_cls = entrypoint.get_dialect_cls(u) if kwargs.pop('_coerce_config', False): def pop_kwarg(key, default=None): @@ -80,21 +82,19 @@ def pop_kwarg(key, default=None): # assemble connection arguments (cargs, cparams) = dialect.create_connect_args(u) cparams.update(pop_kwarg('connect_args', {})) + cargs = list(cargs) # allow mutability # look for existing pool or create pool = pop_kwarg('pool', None) if pool is None: - def connect(): - try: - return dialect.connect(*cargs, **cparams) - except dialect.dbapi.Error as e: - invalidated = dialect.is_disconnect(e, None, None) - util.raise_from_cause( - exc.DBAPIError.instance(None, None, - e, dialect.dbapi.Error, - connection_invalidated=invalidated - ) - ) + def connect(connection_record=None): + if dialect._has_events: + for fn in dialect.dispatch.do_connect: + connection = fn( + dialect, connection_record, cargs, cparams) + if connection is not None: + return connection + return dialect.connect(*cargs, **cparams) creator = pop_kwarg('creator', connect) @@ -160,11 +160,15 @@ def on_connect(dbapi_connection, connection_record): def first_connect(dbapi_connection, connection_record): c = base.Connection(engine, connection=dbapi_connection, - _has_events=False) - + _has_events=False) + c._execution_options = util.immutabledict() dialect.initialize(c) event.listen(pool, 'first_connect', first_connect, once=True) + dialect_cls.engine_created(engine) + if entrypoint is not dialect_cls: + entrypoint.engine_created(engine) + return engine @@ -246,11 +250,11 @@ def drop(self, entity, **kwargs): self.dialect, self, **kwargs).traverse_single(entity) def _run_visitor(self, visitorcallable, element, - connection=None, - **kwargs): + connection=None, + **kwargs): kwargs['checkfirst'] = False visitorcallable(self.dialect, self, - **kwargs).traverse_single(element) + **kwargs).traverse_single(element) def execute(self, object, *multiparams, **params): raise NotImplementedError() diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py index ae647a78ef..505d1fadd0 100644 --- a/lib/sqlalchemy/engine/threadlocal.py +++ b/lib/sqlalchemy/engine/threadlocal.py @@ -1,5 +1,6 @@ # engine/threadlocal.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -58,7 +59,10 @@ def contextual_connect(self, **kw): # guards against pool-level reapers, if desired. 
# or not connection.connection.is_valid: connection = self._tl_connection_cls( - self, self.pool.connect(), **kw) + self, + self._wrap_pool_connect( + self.pool.connect, connection), + **kw) self._connections.conn = weakref.ref(connection) return connection._increment_connect() @@ -94,20 +98,20 @@ def __exit__(self, type, value, traceback): def prepare(self): if not hasattr(self._connections, 'trans') or \ - not self._connections.trans: + not self._connections.trans: return self._connections.trans[-1].prepare() def commit(self): if not hasattr(self._connections, 'trans') or \ - not self._connections.trans: + not self._connections.trans: return trans = self._connections.trans.pop(-1) trans.commit() def rollback(self): if not hasattr(self._connections, 'trans') or \ - not self._connections.trans: + not self._connections.trans: return trans = self._connections.trans.pop(-1) trans.rollback() @@ -119,8 +123,8 @@ def dispose(self): @property def closed(self): return not hasattr(self._connections, 'conn') or \ - self._connections.conn() is None or \ - self._connections.conn().closed + self._connections.conn() is None or \ + self._connections.conn().closed def close(self): if not self.closed: diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index 78ac061877..3cc2f351f2 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -1,5 +1,6 @@ # engine/url.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -24,8 +25,8 @@ class URL(object): Represent the components of a URL used to connect to a database. This object is suitable to be passed directly to a - :func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed from a - string by the :func:`.make_url` function. the string + :func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed + from a string by the :func:`.make_url` function. the string format of the URL is an RFC-1738-style string. All initialization parameters are available as public attributes. @@ -104,11 +105,25 @@ def __eq__(self, other): self.database == other.database and \ self.query == other.query - def get_dialect(self): - """Return the SQLAlchemy database dialect class corresponding - to this URL's driver name. - """ + def get_backend_name(self): + if '+' not in self.drivername: + return self.drivername + else: + return self.drivername.split('+')[0] + def get_driver_name(self): + if '+' not in self.drivername: + return self.get_dialect().driver + else: + return self.drivername.split('+')[1] + + def _get_entrypoint(self): + """Return the "entry point" dialect class. + + This is normally the dialect itself except in the case when the + returned class implements the get_dialect_cls() method. + + """ if '+' not in self.drivername: name = self.drivername else: @@ -118,12 +133,20 @@ def get_dialect(self): # would return a module with 'dialect' as the # actual class if hasattr(cls, 'dialect') and \ - isinstance(cls.dialect, type) and \ - issubclass(cls.dialect, Dialect): + isinstance(cls.dialect, type) and \ + issubclass(cls.dialect, Dialect): return cls.dialect else: return cls + def get_dialect(self): + """Return the SQLAlchemy database dialect class corresponding + to this URL's driver name. 
+ """ + entrypoint = self._get_entrypoint() + dialect_cls = entrypoint.get_dialect_cls(self) + return dialect_cls + def translate_connect_args(self, names=[], **kw): """Translate url attributes into a dictionary of connection arguments. @@ -188,7 +211,8 @@ def _parse_rfc1738_args(name): if components['database'] is not None: tokens = components['database'].split('?', 2) components['database'] = tokens[0] - query = (len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None + query = ( + len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None if util.py2k and query is not None: query = dict((k.encode('ascii'), query[k]) for k in query) else: @@ -214,9 +238,11 @@ def _parse_rfc1738_args(name): def _rfc_1738_quote(text): return re.sub(r'[:@/]', lambda m: "%%%X" % ord(m.group(0)), text) + def _rfc_1738_unquote(text): return util.unquote(text) + def _parse_keyvalue_args(name): m = re.match(r'(\w+)://(.*)', name) if m is not None: diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py index 6c0644be47..d28d870982 100644 --- a/lib/sqlalchemy/engine/util.py +++ b/lib/sqlalchemy/engine/util.py @@ -1,11 +1,13 @@ # engine/util.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from .. import util + def connection_memoize(key): """Decorator, memoize a function in a connection.info stash. @@ -60,7 +62,7 @@ def _distill_params(multiparams, params): return [[zero]] else: if hasattr(multiparams[0], '__iter__') and \ - not hasattr(multiparams[0], 'strip'): + not hasattr(multiparams[0], 'strip'): return multiparams else: return [multiparams] diff --git a/lib/sqlalchemy/event/__init__.py b/lib/sqlalchemy/event/__init__.py index b43bf9bfac..dddb924295 100644 --- a/lib/sqlalchemy/event/__init__.py +++ b/lib/sqlalchemy/event/__init__.py @@ -1,5 +1,6 @@ # event/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/sqlalchemy/event/api.py b/lib/sqlalchemy/event/api.py index b27ce79937..0af48dff37 100644 --- a/lib/sqlalchemy/event/api.py +++ b/lib/sqlalchemy/event/api.py @@ -1,5 +1,6 @@ # event/api.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -24,7 +25,8 @@ def _event_key(target, identifier, fn): return _EventKey(target, identifier, fn, tgt) else: raise exc.InvalidRequestError("No such event '%s' for target '%s'" % - (identifier, target)) + (identifier, target)) + def listen(target, identifier, fn, *args, **kw): """Register a listener function for the given target. @@ -53,9 +55,35 @@ def on_config(): event.listen(Mapper, "before_configure", on_config, once=True) - .. versionadded:: 0.9.3 Added ``once=True`` to :func:`.event.listen` + .. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen` and :func:`.event.listens_for`. + .. note:: + + The :func:`.listen` function cannot be called at the same time + that the target event is being run. 
This has implications + for thread safety, and also means an event cannot be added + from inside the listener function for itself. The list of + events to be run are present inside of a mutable collection + that can't be changed during iteration. + + Event registration and removal is not intended to be a "high + velocity" operation; it is a configurational operation. For + systems that need to quickly associate and deassociate with + events at high scale, use a mutable structure that is handled + from inside of a single listener. + + .. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now + used as the container for the list of events, which explicitly + disallows collection mutation while the collection is being + iterated. + + .. seealso:: + + :func:`.listens_for` + + :func:`.remove` + """ _event_key(target, identifier, fn).listen(*args, **kw) @@ -84,9 +112,13 @@ def on_config(): do_config() - .. versionadded:: 0.9.3 Added ``once=True`` to :func:`.event.listen` + .. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen` and :func:`.event.listens_for`. + .. seealso:: + + :func:`.listen` - general description of event listening + """ def decorate(fn): listen(target, identifier, fn, *args, **kw) @@ -113,14 +145,39 @@ def my_listener_function(*arg): event.remove(SomeMappedClass, "before_insert", my_listener_function) Above, the listener function associated with ``SomeMappedClass`` was also - propagated to subclasses of ``SomeMappedClass``; the :func:`.remove` function - will revert all of these operations. + propagated to subclasses of ``SomeMappedClass``; the :func:`.remove` + function will revert all of these operations. .. versionadded:: 0.9.0 + .. note:: + + The :func:`.remove` function cannot be called at the same time + that the target event is being run. This has implications + for thread safety, and also means an event cannot be removed + from inside the listener function for itself. The list of + events to be run are present inside of a mutable collection + that can't be changed during iteration. + + Event registration and removal is not intended to be a "high + velocity" operation; it is a configurational operation. For + systems that need to quickly associate and deassociate with + events at high scale, use a mutable structure that is handled + from inside of a single listener. + + .. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now + used as the container for the list of events, which explicitly + disallows collection mutation while the collection is being + iterated. + + .. seealso:: + + :func:`.listen` + """ _event_key(target, identifier, fn).remove() + def contains(target, identifier, fn): """Return True if the given target/ident/fn is set up to listen. diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py index b44aeefc70..14940136ab 100644 --- a/lib/sqlalchemy/event/attr.py +++ b/lib/sqlalchemy/event/attr.py @@ -1,5 +1,6 @@ # event/attr.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -36,32 +37,36 @@ from . 
import legacy from itertools import chain import weakref +import collections -class RefCollection(object): - @util.memoized_property - def ref(self): +class RefCollection(util.MemoizedSlots): + __slots__ = 'ref', + + def _memoized_attr_ref(self): return weakref.ref(self, registry._collection_gced) -class _DispatchDescriptor(RefCollection): - """Class-level attributes on :class:`._Dispatch` classes.""" + +class _ClsLevelDispatch(RefCollection): + """Class-level events on :class:`._Dispatch` classes.""" + + __slots__ = ('name', 'arg_names', 'has_kw', + 'legacy_signatures', '_clslevel', '__weakref__') def __init__(self, parent_dispatch_cls, fn): - self.__name__ = fn.__name__ + self.name = fn.__name__ argspec = util.inspect_getargspec(fn) self.arg_names = argspec.args[1:] self.has_kw = bool(argspec.keywords) self.legacy_signatures = list(reversed( - sorted( - getattr(fn, '_legacy_signatures', []), - key=lambda s: s[0] - ) - )) - self.__doc__ = fn.__doc__ = legacy._augment_fn_docs( - self, parent_dispatch_cls, fn) + sorted( + getattr(fn, '_legacy_signatures', []), + key=lambda s: s[0] + ) + )) + fn.__doc__ = legacy._augment_fn_docs(self, parent_dispatch_cls, fn) self._clslevel = weakref.WeakKeyDictionary() - self._empty_listeners = weakref.WeakKeyDictionary() def _adjust_fn_spec(self, fn, named): if named: @@ -82,11 +87,10 @@ def wrap_kw(*args, **kw): return fn(**argdict) return wrap_kw - def insert(self, event_key, propagate): target = event_key.dispatch_target assert isinstance(target, type), \ - "Class-level Event targets must be classes." + "Class-level Event targets must be classes." stack = [target] while stack: cls = stack.pop(0) @@ -95,14 +99,14 @@ def insert(self, event_key, propagate): self.update_subclass(cls) else: if cls not in self._clslevel: - self._clslevel[cls] = [] - self._clslevel[cls].insert(0, event_key._listen_fn) + self._clslevel[cls] = collections.deque() + self._clslevel[cls].appendleft(event_key._listen_fn) registry._stored_in_collection(event_key, self) def append(self, event_key, propagate): target = event_key.dispatch_target assert isinstance(target, type), \ - "Class-level Event targets must be classes." + "Class-level Event targets must be classes." stack = [target] while stack: @@ -112,13 +116,13 @@ def append(self, event_key, propagate): self.update_subclass(cls) else: if cls not in self._clslevel: - self._clslevel[cls] = [] + self._clslevel[cls] = collections.deque() self._clslevel[cls].append(event_key._listen_fn) registry._stored_in_collection(event_key, self) def update_subclass(self, target): if target not in self._clslevel: - self._clslevel[target] = [] + self._clslevel[target] = collections.deque() clslevel = self._clslevel[target] for cls in target.__mro__[1:]: if cls in self._clslevel: @@ -144,53 +148,47 @@ def clear(self): to_clear = set() for dispatcher in self._clslevel.values(): to_clear.update(dispatcher) - dispatcher[:] = [] + dispatcher.clear() registry._clear(self, to_clear) def for_modify(self, obj): """Return an event collection which can be modified. - For _DispatchDescriptor at the class level of + For _ClsLevelDispatch at the class level of a dispatcher, this returns self. """ return self - def __get__(self, obj, cls): - if obj is None: - return self - elif obj._parent_cls in self._empty_listeners: - ret = self._empty_listeners[obj._parent_cls] - else: - self._empty_listeners[obj._parent_cls] = ret = \ - _EmptyListener(self, obj._parent_cls) - # assigning it to __dict__ means - # memoized for fast re-access. but more memory. 
- obj.__dict__[self.__name__] = ret - return ret - -class _HasParentDispatchDescriptor(object): + +class _InstanceLevelDispatch(RefCollection): + __slots__ = () + def _adjust_fn_spec(self, fn, named): return self.parent._adjust_fn_spec(fn, named) -class _EmptyListener(_HasParentDispatchDescriptor): - """Serves as a class-level interface to the events - served by a _DispatchDescriptor, when there are no + +class _EmptyListener(_InstanceLevelDispatch): + """Serves as a proxy interface to the events + served by a _ClsLevelDispatch, when there are no instance-level events present. Is replaced by _ListenerCollection when instance-level events are added. """ + + propagate = frozenset() + listeners = () + + __slots__ = 'parent', 'parent_listeners', 'name' + def __init__(self, parent, target_cls): if target_cls not in parent._clslevel: parent.update_subclass(target_cls) - self.parent = parent # _DispatchDescriptor + self.parent = parent # _ClsLevelDispatch self.parent_listeners = parent._clslevel[target_cls] - self.name = parent.__name__ - self.propagate = frozenset() - self.listeners = () - + self.name = parent.name def for_modify(self, obj): """Return an event collection which can be modified. @@ -201,9 +199,11 @@ def for_modify(self, obj): and returns it. """ - result = _ListenerCollection(self.parent, obj._parent_cls) - if obj.__dict__[self.name] is self: - obj.__dict__[self.name] = result + result = _ListenerCollection(self.parent, obj._instance_cls) + if getattr(obj, self.name) is self: + setattr(obj, self.name, result) + else: + assert isinstance(getattr(obj, self.name), _JoinedListener) return result def _needs_modify(self, *args, **kw): @@ -229,11 +229,10 @@ def __bool__(self): __nonzero__ = __bool__ -class _CompoundListener(_HasParentDispatchDescriptor): - _exec_once = False +class _CompoundListener(_InstanceLevelDispatch): + __slots__ = '_exec_once_mutex', '_exec_once' - @util.memoized_property - def _exec_once_mutex(self): + def _memoized_attr__exec_once_mutex(self): return threading.Lock() def exec_once(self, *args, **kw): @@ -267,7 +266,8 @@ def __bool__(self): __nonzero__ = __bool__ -class _ListenerCollection(RefCollection, _CompoundListener): + +class _ListenerCollection(_CompoundListener): """Instance-level attributes on instances of :class:`._Dispatch`. Represents a collection of listeners. 
@@ -277,13 +277,18 @@ class _ListenerCollection(RefCollection, _CompoundListener): """ + __slots__ = ( + 'parent_listeners', 'parent', 'name', 'listeners', + 'propagate', '__weakref__') + def __init__(self, parent, target_cls): if target_cls not in parent._clslevel: parent.update_subclass(target_cls) + self._exec_once = False self.parent_listeners = parent._clslevel[target_cls] self.parent = parent - self.name = parent.__name__ - self.listeners = [] + self.name = parent.name + self.listeners = collections.deque() self.propagate = set() def for_modify(self, obj): @@ -303,10 +308,10 @@ def _update(self, other, only_propagate=True): existing_listener_set = set(existing_listeners) self.propagate.update(other.propagate) other_listeners = [l for l - in other.listeners - if l not in existing_listener_set - and not only_propagate or l in self.propagate - ] + in other.listeners + if l not in existing_listener_set + and not only_propagate or l in self.propagate + ] existing_listeners.extend(other_listeners) @@ -314,14 +319,12 @@ def _update(self, other, only_propagate=True): registry._stored_in_collection_multi(self, other, to_associate) def insert(self, event_key, propagate): - if event_key._listen_fn not in self.listeners: - event_key.prepend_to_list(self, self.listeners) + if event_key.prepend_to_list(self, self.listeners): if propagate: self.propagate.add(event_key._listen_fn) def append(self, event_key, propagate): - if event_key._listen_fn not in self.listeners: - event_key.append_to_list(self, self.listeners) + if event_key.append_to_list(self, self.listeners): if propagate: self.propagate.add(event_key._listen_fn) @@ -333,28 +336,14 @@ def remove(self, event_key): def clear(self): registry._clear(self, self.listeners) self.propagate.clear() - self.listeners[:] = [] - - -class _JoinedDispatchDescriptor(object): - def __init__(self, name): - self.name = name - - def __get__(self, obj, cls): - if obj is None: - return self - else: - obj.__dict__[self.name] = ret = _JoinedListener( - obj.parent, self.name, - getattr(obj.local, self.name) - ) - return ret + self.listeners.clear() class _JoinedListener(_CompoundListener): - _exec_once = False + __slots__ = 'parent', 'name', 'local', 'parent_listeners' def __init__(self, parent, name, local): + self._exec_once = False self.parent = parent self.name = name self.local = local @@ -382,5 +371,3 @@ def remove(self, event_key): def clear(self): raise NotImplementedError() - - diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py index 5c8d92cb33..81ef5d89c8 100644 --- a/lib/sqlalchemy/event/base.py +++ b/lib/sqlalchemy/event/base.py @@ -1,5 +1,6 @@ # event/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -7,7 +8,7 @@ """Base implementation classes. The public-facing ``Events`` serves as the base class for an event interface; -it's public attributes represent different kinds of events. These attributes +its public attributes represent different kinds of events. These attributes are mirrored onto a ``_Dispatch`` class, which serves as a container for collections of listener functions. These collections are represented both at the class level of a particular ``_Dispatch`` class as well as within @@ -16,8 +17,11 @@ """ from __future__ import absolute_import +import weakref + from .. 
import util -from .attr import _JoinedDispatchDescriptor, _EmptyListener, _DispatchDescriptor +from .attr import _JoinedListener, \ + _EmptyListener, _ClsLevelDispatch _registrars = util.defaultdict(list) @@ -31,10 +35,12 @@ class _UnpickleDispatch(object): :class:`_Dispatch` given a particular :class:`.Events` subclass. """ - def __call__(self, _parent_cls): - for cls in _parent_cls.__mro__: + + def __call__(self, _instance_cls): + for cls in _instance_cls.__mro__: if 'dispatch' in cls.__dict__: - return cls.__dict__['dispatch'].dispatch_cls(_parent_cls) + return cls.__dict__['dispatch'].\ + dispatch_cls._for_class(_instance_cls) else: raise AttributeError("No class with a 'dispatch' member present.") @@ -59,16 +65,53 @@ class defined, by the :func:`._create_dispatcher_class` function. """ - _events = None - """reference the :class:`.Events` class which this - :class:`._Dispatch` is created for.""" + # in one ORM edge case, an attribute is added to _Dispatch, + # so __dict__ is used in just that case and potentially others. + __slots__ = '_parent', '_instance_cls', '__dict__', '_empty_listeners' + + _empty_listener_reg = weakref.WeakKeyDictionary() + + def __init__(self, parent, instance_cls=None): + self._parent = parent + self._instance_cls = instance_cls + if instance_cls: + try: + self._empty_listeners = self._empty_listener_reg[instance_cls] + except KeyError: + self._empty_listeners = \ + self._empty_listener_reg[instance_cls] = dict( + (ls.name, _EmptyListener(ls, instance_cls)) + for ls in parent._event_descriptors + ) + else: + self._empty_listeners = {} + + def __getattr__(self, name): + # assign EmptyListeners as attributes on demand + # to reduce startup time for new dispatch objects + try: + ls = self._empty_listeners[name] + except KeyError: + raise AttributeError(name) + else: + setattr(self, ls.name, ls) + return ls - def __init__(self, _parent_cls): - self._parent_cls = _parent_cls + @property + def _event_descriptors(self): + for k in self._event_names: + yield getattr(self, k) - @util.classproperty - def _listen(cls): - return cls._events._listen + def _for_class(self, instance_cls): + return self.__class__(self, instance_cls) + + def _for_instance(self, instance): + instance_cls = instance.__class__ + return self._for_class(instance_cls) + + @property + def _listen(self): + return self._events._listen def _join(self, other): """Create a 'join' of this :class:`._Dispatch` and another. 
@@ -79,37 +122,28 @@ def _join(self, other): """ if '_joined_dispatch_cls' not in self.__class__.__dict__: cls = type( - "Joined%s" % self.__class__.__name__, - (_JoinedDispatcher, self.__class__), {} - ) - for ls in _event_descriptors(self): - setattr(cls, ls.name, _JoinedDispatchDescriptor(ls.name)) + "Joined%s" % self.__class__.__name__, + (_JoinedDispatcher, ), {'__slots__': self._event_names} + ) self.__class__._joined_dispatch_cls = cls return self._joined_dispatch_cls(self, other) def __reduce__(self): - return _UnpickleDispatch(), (self._parent_cls, ) + return _UnpickleDispatch(), (self._instance_cls, ) def _update(self, other, only_propagate=True): """Populate from the listeners in another :class:`_Dispatch` object.""" - - for ls in _event_descriptors(other): + for ls in other._event_descriptors: if isinstance(ls, _EmptyListener): continue getattr(self, ls.name).\ for_modify(self)._update(ls, only_propagate=only_propagate) - @util.hybridmethod def _clear(self): - for attr in dir(self): - if _is_event_name(attr): - getattr(self, attr).for_modify(self).clear() - - -def _event_descriptors(target): - return [getattr(target, k) for k in dir(target) if _is_event_name(k)] + for ls in self._event_descriptors: + ls.for_modify(self).clear() class _EventMeta(type): @@ -128,26 +162,38 @@ def _create_dispatcher_class(cls, classname, bases, dict_): # there's all kinds of ways to do this, # i.e. make a Dispatch class that shares the '_listen' method # of the Event class, this is the straight monkeypatch. - dispatch_base = getattr(cls, 'dispatch', _Dispatch) + if hasattr(cls, 'dispatch'): + dispatch_base = cls.dispatch.__class__ + else: + dispatch_base = _Dispatch + + event_names = [k for k in dict_ if _is_event_name(k)] dispatch_cls = type("%sDispatch" % classname, - (dispatch_base, ), {}) - cls._set_dispatch(cls, dispatch_cls) + (dispatch_base, ), {'__slots__': event_names}) - for k in dict_: - if _is_event_name(k): - setattr(dispatch_cls, k, _DispatchDescriptor(cls, dict_[k])) - _registrars[k].append(cls) + dispatch_cls._event_names = event_names + + dispatch_inst = cls._set_dispatch(cls, dispatch_cls) + for k in dispatch_cls._event_names: + setattr(dispatch_inst, k, _ClsLevelDispatch(cls, dict_[k])) + _registrars[k].append(cls) + + for super_ in dispatch_cls.__bases__: + if issubclass(super_, _Dispatch) and super_ is not _Dispatch: + for ls in super_._events.dispatch._event_descriptors: + setattr(dispatch_inst, ls.name, ls) + dispatch_cls._event_names.append(ls.name) if getattr(cls, '_dispatch_target', None): cls._dispatch_target.dispatch = dispatcher(cls) def _remove_dispatcher(cls): - for k in dir(cls): - if _is_event_name(k): - _registrars[k].remove(cls) - if not _registrars[k]: - del _registrars[k] + for k in cls.dispatch._event_names: + _registrars[k].remove(cls) + if not _registrars[k]: + del _registrars[k] + class Events(util.with_metaclass(_EventMeta, object)): """Define event listening functions for a particular target type.""" @@ -159,19 +205,31 @@ def _set_dispatch(cls, dispatch_cls): # "self.dispatch._events." # @staticemethod to allow easy "super" calls while in a metaclass # constructor. - cls.dispatch = dispatch_cls + cls.dispatch = dispatch_cls(None) dispatch_cls._events = cls - + return cls.dispatch @classmethod def _accept_with(cls, target): # Mapper, ClassManager, Session override this to # also accept classes, scoped_sessions, sessionmakers, etc. 
if hasattr(target, 'dispatch') and ( - isinstance(target.dispatch, cls.dispatch) or \ - isinstance(target.dispatch, type) and \ - issubclass(target.dispatch, cls.dispatch) - ): + + isinstance(target.dispatch, cls.dispatch.__class__) or + + + ( + isinstance(target.dispatch, type) and + isinstance(target.dispatch, cls.dispatch.__class__) + ) or + + ( + isinstance(target.dispatch, _JoinedDispatcher) and + isinstance(target.dispatch.parent, cls.dispatch.__class__) + ) + + + ): return target else: return None @@ -192,10 +250,24 @@ def _clear(cls): class _JoinedDispatcher(object): """Represent a connection between two _Dispatch objects.""" + __slots__ = 'local', 'parent', '_instance_cls' + def __init__(self, local, parent): self.local = local self.parent = parent - self._parent_cls = local._parent_cls + self._instance_cls = self.local._instance_cls + + def __getattr__(self, name): + # assign _JoinedListeners as attributes on demand + # to reduce startup time for new dispatch objects + ls = getattr(self.local, name) + jl = _JoinedListener(self.parent, ls.name, ls) + setattr(self, ls.name, jl) + return jl + + @property + def _listen(self): + return self.parent._listen class dispatcher(object): @@ -205,6 +277,7 @@ class dispatcher(object): instances. """ + def __init__(self, events): self.dispatch_cls = events.dispatch self.events = events @@ -212,6 +285,5 @@ def __init__(self, events): def __get__(self, obj, cls): if obj is None: return self.dispatch_cls - obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls) + obj.__dict__['dispatch'] = disp = self.dispatch_cls._for_instance(obj) return disp - diff --git a/lib/sqlalchemy/event/legacy.py b/lib/sqlalchemy/event/legacy.py index d8a66674d9..b359bf48ac 100644 --- a/lib/sqlalchemy/event/legacy.py +++ b/lib/sqlalchemy/event/legacy.py @@ -1,5 +1,6 @@ # event/legacy.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -11,6 +12,7 @@ from .. 
import util + def _legacy_signature(since, argnames, converter=None): def leg(fn): if not hasattr(fn, '_legacy_signatures'): @@ -19,8 +21,9 @@ def leg(fn): return fn return leg -def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec): - for since, argnames, conv in dispatch_descriptor.legacy_signatures: + +def _wrap_fn_for_legacy(dispatch_collection, fn, argspec): + for since, argnames, conv in dispatch_collection.legacy_signatures: if argnames[-1] == "**kw": has_kw = True argnames = argnames[0:-1] @@ -28,15 +31,16 @@ def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec): has_kw = False if len(argnames) == len(argspec.args) \ - and has_kw is bool(argspec.keywords): + and has_kw is bool(argspec.keywords): if conv: assert not has_kw + def wrap_leg(*args): return fn(*conv(*args)) else: def wrap_leg(*args, **kw): - argdict = dict(zip(dispatch_descriptor.arg_names, args)) + argdict = dict(zip(dispatch_collection.arg_names, args)) args = [argdict[name] for name in argnames] if has_kw: return fn(*args, **kw) @@ -46,38 +50,42 @@ def wrap_leg(*args, **kw): else: return fn + def _indent(text, indent): return "\n".join( - indent + line - for line in text.split("\n") - ) + indent + line + for line in text.split("\n") + ) + -def _standard_listen_example(dispatch_descriptor, sample_target, fn): +def _standard_listen_example(dispatch_collection, sample_target, fn): example_kw_arg = _indent( - "\n".join( - "%(arg)s = kw['%(arg)s']" % {"arg": arg} - for arg in dispatch_descriptor.arg_names[0:2] - ), - " ") - if dispatch_descriptor.legacy_signatures: + "\n".join( + "%(arg)s = kw['%(arg)s']" % {"arg": arg} + for arg in dispatch_collection.arg_names[0:2] + ), + " ") + if dispatch_collection.legacy_signatures: current_since = max(since for since, args, conv - in dispatch_descriptor.legacy_signatures) + in dispatch_collection.legacy_signatures) else: current_since = None text = ( - "from sqlalchemy import event\n\n" - "# standard decorator style%(current_since)s\n" - "@event.listens_for(%(sample_target)s, '%(event_name)s')\n" - "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n" - " \"listen for the '%(event_name)s' event\"\n" - "\n # ... (event handling logic) ...\n" + "from sqlalchemy import event\n\n" + "# standard decorator style%(current_since)s\n" + "@event.listens_for(%(sample_target)s, '%(event_name)s')\n" + "def receive_%(event_name)s(" + "%(named_event_arguments)s%(has_kw_arguments)s):\n" + " \"listen for the '%(event_name)s' event\"\n" + "\n # ... 
(event handling logic) ...\n" ) - if len(dispatch_descriptor.arg_names) > 3: + if len(dispatch_collection.arg_names) > 3: text += ( "\n# named argument style (new in 0.9)\n" - "@event.listens_for(%(sample_target)s, '%(event_name)s', named=True)\n" + "@event.listens_for(" + "%(sample_target)s, '%(event_name)s', named=True)\n" "def receive_%(event_name)s(**kw):\n" " \"listen for the '%(event_name)s' event\"\n" "%(example_kw_arg)s\n" @@ -85,72 +93,77 @@ def _standard_listen_example(dispatch_descriptor, sample_target, fn): ) text %= { - "current_since": " (arguments as of %s)" % - current_since if current_since else "", - "event_name": fn.__name__, - "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else "", - "named_event_arguments": ", ".join(dispatch_descriptor.arg_names), - "example_kw_arg": example_kw_arg, - "sample_target": sample_target - } + "current_since": " (arguments as of %s)" % + current_since if current_since else "", + "event_name": fn.__name__, + "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "", + "named_event_arguments": ", ".join(dispatch_collection.arg_names), + "example_kw_arg": example_kw_arg, + "sample_target": sample_target + } return text -def _legacy_listen_examples(dispatch_descriptor, sample_target, fn): + +def _legacy_listen_examples(dispatch_collection, sample_target, fn): text = "" - for since, args, conv in dispatch_descriptor.legacy_signatures: + for since, args, conv in dispatch_collection.legacy_signatures: text += ( "\n# legacy calling style (pre-%(since)s)\n" "@event.listens_for(%(sample_target)s, '%(event_name)s')\n" - "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n" + "def receive_%(event_name)s(" + "%(named_event_arguments)s%(has_kw_arguments)s):\n" " \"listen for the '%(event_name)s' event\"\n" "\n # ... (event handling logic) ...\n" % { "since": since, "event_name": fn.__name__, - "has_kw_arguments": " **kw" if dispatch_descriptor.has_kw else "", + "has_kw_arguments": " **kw" + if dispatch_collection.has_kw else "", "named_event_arguments": ", ".join(args), "sample_target": sample_target } ) return text -def _version_signature_changes(dispatch_descriptor): - since, args, conv = dispatch_descriptor.legacy_signatures[0] + +def _version_signature_changes(dispatch_collection): + since, args, conv = dispatch_collection.legacy_signatures[0] return ( - "\n.. versionchanged:: %(since)s\n" - " The ``%(event_name)s`` event now accepts the \n" - " arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n" - " Listener functions which accept the previous argument \n" - " signature(s) listed above will be automatically \n" - " adapted to the new signature." % { - "since": since, - "event_name": dispatch_descriptor.__name__, - "named_event_arguments": ", ".join(dispatch_descriptor.arg_names), - "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else "" - } - ) + "\n.. versionchanged:: %(since)s\n" + " The ``%(event_name)s`` event now accepts the \n" + " arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n" + " Listener functions which accept the previous argument \n" + " signature(s) listed above will be automatically \n" + " adapted to the new signature." 
% { + "since": since, + "event_name": dispatch_collection.name, + "named_event_arguments": ", ".join(dispatch_collection.arg_names), + "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "" + } + ) + -def _augment_fn_docs(dispatch_descriptor, parent_dispatch_cls, fn): +def _augment_fn_docs(dispatch_collection, parent_dispatch_cls, fn): header = ".. container:: event_signatures\n\n"\ - " Example argument forms::\n"\ - "\n" + " Example argument forms::\n"\ + "\n" sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj") text = ( - header + - _indent( - _standard_listen_example( - dispatch_descriptor, sample_target, fn), - " " * 8) - ) - if dispatch_descriptor.legacy_signatures: + header + + _indent( + _standard_listen_example( + dispatch_collection, sample_target, fn), + " " * 8) + ) + if dispatch_collection.legacy_signatures: text += _indent( - _legacy_listen_examples( - dispatch_descriptor, sample_target, fn), - " " * 8) + _legacy_listen_examples( + dispatch_collection, sample_target, fn), + " " * 8) - text += _version_signature_changes(dispatch_descriptor) + text += _version_signature_changes(dispatch_collection) return util.inject_docstring_text(fn.__doc__, - text, - 1 - ) + text, + 1 + ) diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py index 6f3eb3e85b..e1e9262b65 100644 --- a/lib/sqlalchemy/event/registry.py +++ b/lib/sqlalchemy/event/registry.py @@ -1,5 +1,6 @@ # event/registry.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -36,7 +37,7 @@ _collection_to_key = collections.defaultdict(dict) """ -Given a _ListenerCollection or _DispatchDescriptor, can locate +Given a _ListenerCollection or _ClsLevelListener, can locate all the original listen() arguments and the listener fn contained ref(listenercollection) -> { @@ -46,6 +47,7 @@ } """ + def _collection_gced(ref): # defaultdict, so can't get a KeyError if not _collection_to_key or ref not in _collection_to_key: @@ -59,6 +61,7 @@ def _collection_gced(ref): if not dispatch_reg: _key_to_collection.pop(key) + def _stored_in_collection(event_key, owner): key = event_key._key @@ -68,13 +71,16 @@ def _stored_in_collection(event_key, owner): listen_ref = weakref.ref(event_key._listen_fn) if owner_ref in dispatch_reg: - assert dispatch_reg[owner_ref] == listen_ref - else: - dispatch_reg[owner_ref] = listen_ref + return False + + dispatch_reg[owner_ref] = listen_ref listener_to_key = _collection_to_key[owner_ref] listener_to_key[listen_ref] = key + return True + + def _removed_from_collection(event_key, owner): key = event_key._key @@ -91,6 +97,7 @@ def _removed_from_collection(event_key, owner): listener_to_key = _collection_to_key[owner_ref] listener_to_key.pop(listen_ref) + def _stored_in_collection_multi(newowner, oldowner, elements): if not elements: return @@ -112,6 +119,7 @@ def _stored_in_collection_multi(newowner, oldowner, elements): new_listener_to_key[listen_ref] = key + def _clear(owner, elements): if not elements: return @@ -132,8 +140,12 @@ class _EventKey(object): """Represent :func:`.listen` arguments. 
""" + __slots__ = ( + 'target', 'identifier', 'fn', 'fn_key', 'fn_wrap', 'dispatch_target' + ) - def __init__(self, target, identifier, fn, dispatch_target, _fn_wrap=None): + def __init__(self, target, identifier, + fn, dispatch_target, _fn_wrap=None): self.target = target self.identifier = identifier self.fn = fn @@ -158,7 +170,7 @@ def with_wrapper(self, fn_wrap): self.fn, self.dispatch_target, _fn_wrap=fn_wrap - ) + ) def with_dispatch_target(self, dispatch_target): if dispatch_target is self.dispatch_target: @@ -170,12 +182,24 @@ def with_dispatch_target(self, dispatch_target): self.fn, dispatch_target, _fn_wrap=self.fn_wrap - ) + ) def listen(self, *args, **kw): once = kw.pop("once", False) + named = kw.pop("named", False) + + target, identifier, fn = \ + self.dispatch_target, self.identifier, self._listen_fn + + dispatch_collection = getattr(target.dispatch, identifier) + + adjusted_fn = dispatch_collection._adjust_fn_spec(fn, named) + + self = self.with_wrapper(adjusted_fn) + if once: - self.with_wrapper(util.only_once(self._listen_fn)).listen(*args, **kw) + self.with_wrapper( + util.only_once(self._listen_fn)).listen(*args, **kw) else: self.dispatch_target.dispatch._listen(self, *args, **kw) @@ -184,9 +208,9 @@ def remove(self): if key not in _key_to_collection: raise exc.InvalidRequestError( - "No listeners found for event %s / %r / %s " % - (self.target, self.identifier, self.fn) - ) + "No listeners found for event %s / %r / %s " % + (self.target, self.identifier, self.fn) + ) dispatch_reg = _key_to_collection.pop(key) for collection_ref, listener_ref in dispatch_reg.items(): @@ -201,41 +225,38 @@ def contains(self): return self._key in _key_to_collection def base_listen(self, propagate=False, insert=False, - named=False): + named=False): target, identifier, fn = \ self.dispatch_target, self.identifier, self._listen_fn - dispatch_descriptor = getattr(target.dispatch, identifier) - - fn = dispatch_descriptor._adjust_fn_spec(fn, named) - self = self.with_wrapper(fn) + dispatch_collection = getattr(target.dispatch, identifier) if insert: - dispatch_descriptor.\ - for_modify(target.dispatch).insert(self, propagate) + dispatch_collection.\ + for_modify(target.dispatch).insert(self, propagate) else: - dispatch_descriptor.\ - for_modify(target.dispatch).append(self, propagate) + dispatch_collection.\ + for_modify(target.dispatch).append(self, propagate) @property def _listen_fn(self): return self.fn_wrap or self.fn - def append_value_to_list(self, owner, list_, value): - _stored_in_collection(self, owner) - list_.append(value) - def append_to_list(self, owner, list_): - _stored_in_collection(self, owner) - list_.append(self._listen_fn) + if _stored_in_collection(self, owner): + list_.append(self._listen_fn) + return True + else: + return False def remove_from_list(self, owner, list_): _removed_from_collection(self, owner) list_.remove(self._listen_fn) def prepend_to_list(self, owner, list_): - _stored_in_collection(self, owner) - list_.insert(0, self._listen_fn) - - + if _stored_in_collection(self, owner): + list_.appendleft(self._listen_fn) + return True + else: + return False diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py index 9ba6de68b3..34f14c605d 100644 --- a/lib/sqlalchemy/events.py +++ b/lib/sqlalchemy/events.py @@ -1,5 +1,6 @@ # sqlalchemy/events.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT 
License: http://www.opensource.org/licenses/mit-license.php @@ -11,6 +12,7 @@ from .engine import Connectable, Engine, Dialect from .sql.base import SchemaEventTarget + class DDLEvents(event.Events): """ Define event listeners for schema objects, @@ -74,7 +76,7 @@ def after_create(target, connection, **kw): _dispatch_target = SchemaEventTarget def before_create(self, target, connection, **kw): - """Called before CREATE statments are emitted. + """Called before CREATE statements are emitted. :param target: the :class:`.MetaData` or :class:`.Table` object which is the target of the event. @@ -90,7 +92,7 @@ def before_create(self, target, connection, **kw): """ def after_create(self, target, connection, **kw): - """Called after CREATE statments are emitted. + """Called after CREATE statements are emitted. :param target: the :class:`.MetaData` or :class:`.Table` object which is the target of the event. @@ -106,7 +108,7 @@ def after_create(self, target, connection, **kw): """ def before_drop(self, target, connection, **kw): - """Called before DROP statments are emitted. + """Called before DROP statements are emitted. :param target: the :class:`.MetaData` or :class:`.Table` object which is the target of the event. @@ -122,7 +124,7 @@ def before_drop(self, target, connection, **kw): """ def after_drop(self, target, connection, **kw): - """Called after DROP statments are emitted. + """Called after DROP statements are emitted. :param target: the :class:`.MetaData` or :class:`.Table` object which is the target of the event. @@ -220,7 +222,6 @@ def listen_for_reflect(inspector, table, column_info): """ - class PoolEvents(event.Events): """Available events for :class:`.Pool`. @@ -290,9 +291,9 @@ def first_connect(self, dbapi_connection, connection_record): :class:`.Pool` refers to a single "creator" function (which in terms of a :class:`.Engine` refers to the URL and connection options used), it is typically valid to make observations about a single connection - that can be safely assumed to be valid about all subsequent connections, - such as the database version, the server and client encoding settings, - collation settings, and many others. + that can be safely assumed to be valid about all subsequent + connections, such as the database version, the server and client + encoding settings, collation settings, and many others. :param dbapi_connection: a DBAPI connection. @@ -310,8 +311,8 @@ def checkout(self, dbapi_connection, connection_record, connection_proxy): DBAPI connection. :param connection_proxy: the :class:`._ConnectionFairy` object which - will proxy the public interface of the DBAPI connection for the lifespan - of the checkout. + will proxy the public interface of the DBAPI connection for the + lifespan of the checkout. If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current connection will be disposed and a fresh connection retrieved. @@ -337,7 +338,7 @@ def checkin(self, dbapi_connection, connection_record): """ - def reset(self, dbapi_connnection, connection_record): + def reset(self, dbapi_connection, connection_record): """Called before the "reset" action occurs for a pooled connection. This event represents @@ -348,7 +349,7 @@ def reset(self, dbapi_connnection, connection_record): The :meth:`.PoolEvents.reset` event is usually followed by the - the :meth:`.PoolEvents.checkin` event is called, except in those + :meth:`.PoolEvents.checkin` event is called, except in those cases where the connection is discarded immediately after reset. 
:param dbapi_connection: a DBAPI connection. @@ -370,9 +371,11 @@ def invalidate(self, dbapi_connection, connection_record, exception): """Called when a DBAPI connection is to be "invalidated". This event is called any time the :meth:`._ConnectionRecord.invalidate` - method is invoked, either from API usage or via "auto-invalidation". - The event occurs before a final attempt to call ``.close()`` on the connection - occurs. + method is invoked, either from API usage or via "auto-invalidation", + without the ``soft`` flag. + + The event occurs before a final attempt to call ``.close()`` on the + connection occurs. :param dbapi_connection: a DBAPI connection. @@ -391,6 +394,21 @@ def invalidate(self, dbapi_connection, connection_record, exception): """ + def soft_invalidate(self, dbapi_connection, connection_record, exception): + """Called when a DBAPI connection is to be "soft invalidated". + + This event is called any time the :meth:`._ConnectionRecord.invalidate` + method is invoked with the ``soft`` flag. + + Soft invalidation refers to when the connection record that tracks + this connection will force a reconnect after the current connection + is checked in. It does not actively close the dbapi_connection + at the point at which it is called. + + .. versionadded:: 1.0.3 + + """ + class ConnectionEvents(event.Events): """Available events for :class:`.Connectable`, which includes @@ -406,7 +424,7 @@ class or instance, such as an :class:`.Engine`, e.g.:: def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): - log.info("Received statement: %s" % statement) + log.info("Received statement: %s", statement) engine = create_engine('postgresql://scott:tiger@localhost/test') event.listen(engine, "before_cursor_execute", before_cursor_execute) @@ -417,7 +435,13 @@ def before_cursor_execute(conn, cursor, statement, parameters, context, @event.listens_for(conn, 'before_cursor_execute') def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): - log.info("Received statement: %s" % statement) + log.info("Received statement: %s", statement) + + When the methods are called with a `statement` parameter, such as in + :meth:`.after_cursor_execute`, :meth:`.before_cursor_execute` and + :meth:`.dbapi_error`, the statement is the exact SQL string that was + prepared for transmission to the DBAPI ``cursor`` in the connection's + :class:`.Dialect`. 
The :meth:`.before_execute` and :meth:`.before_cursor_execute` events can also be established with the ``retval=True`` flag, which @@ -466,11 +490,11 @@ class or to an instance of :class:`.Engine` *after* the instantiation _target_class_doc = "SomeEngine" _dispatch_target = Connectable - @classmethod def _listen(cls, event_key, retval=False): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn target._has_events = True @@ -479,7 +503,7 @@ def _listen(cls, event_key, retval=False): orig_fn = fn def wrap_before_execute(conn, clauseelement, - multiparams, params): + multiparams, params): orig_fn(conn, clauseelement, multiparams, params) return clauseelement, multiparams, params fn = wrap_before_execute @@ -487,19 +511,20 @@ def wrap_before_execute(conn, clauseelement, orig_fn = fn def wrap_before_cursor_execute(conn, cursor, statement, - parameters, context, executemany): + parameters, context, + executemany): orig_fn(conn, cursor, statement, - parameters, context, executemany) + parameters, context, executemany) return statement, parameters fn = wrap_before_cursor_execute - elif retval and \ - identifier not in ('before_execute', 'before_cursor_execute'): + identifier not in ('before_execute', + 'before_cursor_execute', 'handle_error'): raise exc.ArgumentError( - "Only the 'before_execute' and " - "'before_cursor_execute' engine " - "event listeners accept the 'retval=True' " - "argument.") + "Only the 'before_execute', " + "'before_cursor_execute' and 'handle_error' engine " + "event listeners accept the 'retval=True' " + "argument.") event_key.with_wrapper(fn).base_listen() def before_execute(self, conn, clauseelement, multiparams, params): @@ -545,11 +570,10 @@ def after_execute(self, conn, clauseelement, multiparams, params, result): """ def before_cursor_execute(self, conn, cursor, statement, - parameters, context, executemany): + parameters, context, executemany): """Intercept low-level cursor execute() events before execution, - receiving the string - SQL statement and DBAPI-specific parameter list to be invoked - against a cursor. + receiving the string SQL statement and DBAPI-specific parameter list to + be invoked against a cursor. This event is a good choice for logging as well as late modifications to the SQL string. It's less ideal for parameter modifications except @@ -569,7 +593,7 @@ def before_cursor_execute(conn, cursor, statement, :param conn: :class:`.Connection` object :param cursor: DBAPI cursor object - :param statement: string SQL statement + :param statement: string SQL statement, as to be passed to the DBAPI :param parameters: Dictionary, tuple, or list of parameters being passed to the ``execute()`` or ``executemany()`` method of the DBAPI ``cursor``. In some cases may be ``None``. @@ -587,14 +611,14 @@ def before_cursor_execute(conn, cursor, statement, """ def after_cursor_execute(self, conn, cursor, statement, - parameters, context, executemany): + parameters, context, executemany): """Intercept low-level cursor execute() events after execution. :param conn: :class:`.Connection` object :param cursor: DBAPI cursor object. Will have results pending if the statement was a SELECT, but these should not be consumed as they will be needed by the :class:`.ResultProxy`. 
- :param statement: string SQL statement + :param statement: string SQL statement, as passed to the DBAPI :param parameters: Dictionary, tuple, or list of parameters being passed to the ``execute()`` or ``executemany()`` method of the DBAPI ``cursor``. In some cases may be ``None``. @@ -606,7 +630,7 @@ def after_cursor_execute(self, conn, cursor, statement, """ def dbapi_error(self, conn, cursor, statement, parameters, - context, exception): + context, exception): """Intercept a raw DBAPI error. This event is called with the DBAPI exception instance @@ -618,10 +642,15 @@ def dbapi_error(self, conn, cursor, statement, parameters, The use case here is to inject low-level exception handling into an :class:`.Engine`, typically for logging and - debugging purposes. In general, user code should **not** modify - any state or throw any exceptions here as this will - interfere with SQLAlchemy's cleanup and error handling - routines. + debugging purposes. + + .. warning:: + + Code should **not** modify + any state or throw any exceptions here as this will + interfere with SQLAlchemy's cleanup and error handling + routines. For exception modification, please refer to the + new :meth:`.ConnectionEvents.handle_error` event. Subsequent to this hook, SQLAlchemy may attempt any number of operations on the connection/cursor, including @@ -633,7 +662,7 @@ def dbapi_error(self, conn, cursor, statement, parameters, :param conn: :class:`.Connection` object :param cursor: DBAPI cursor object - :param statement: string SQL statement + :param statement: string SQL statement, as passed to the DBAPI :param parameters: Dictionary, tuple, or list of parameters being passed to the ``execute()`` or ``executemany()`` method of the DBAPI ``cursor``. In some cases may be ``None``. @@ -642,7 +671,116 @@ def dbapi_error(self, conn, cursor, statement, parameters, :param exception: The **unwrapped** exception emitted directly from the DBAPI. The class here is specific to the DBAPI module in use. - .. versionadded:: 0.7.7 + .. deprecated:: 0.9.7 - replaced by + :meth:`.ConnectionEvents.handle_error` + + """ + + def handle_error(self, exception_context): + """Intercept all exceptions processed by the :class:`.Connection`. + + This includes all exceptions emitted by the DBAPI as well as + within SQLAlchemy's statement invocation process, including + encoding errors and other statement validation errors. Other areas + in which the event is invoked include transaction begin and end, + result row fetching, cursor creation. + + Note that :meth:`.handle_error` may support new kinds of exceptions + and new calling scenarios at *any time*. Code which uses this + event must expect new calling patterns to be present in minor + releases. + + To support the wide variety of members that correspond to an exception, + as well as to allow extensibility of the event without backwards + incompatibility, the sole argument received is an instance of + :class:`.ExceptionContext`. This object contains data members + representing detail about the exception. + + Use cases supported by this hook include: + + * read-only, low-level exception handling for logging and + debugging purposes + * exception re-writing + + The hook is called while the cursor from the failed operation + (if any) is still open and accessible. Special cleanup operations + can be called on this cursor; SQLAlchemy will attempt to close + this cursor subsequent to this hook being invoked. 
If the connection + is in "autocommit" mode, the transaction also remains open within + the scope of this hook; the rollback of the per-statement transaction + also occurs after the hook is called. + + The user-defined event handler has two options for replacing + the SQLAlchemy-constructed exception into one that is user + defined. It can either raise this new exception directly, in + which case all further event listeners are bypassed and the + exception will be raised, after appropriate cleanup as taken + place:: + + @event.listens_for(Engine, "handle_error") + def handle_exception(context): + if isinstance(context.original_exception, + psycopg2.OperationalError) and \\ + "failed" in str(context.original_exception): + raise MySpecialException("failed operation") + + .. warning:: Because the :meth:`.ConnectionEvents.handle_error` + event specifically provides for exceptions to be re-thrown as + the ultimate exception raised by the failed statement, + **stack traces will be misleading** if the user-defined event + handler itself fails and throws an unexpected exception; + the stack trace may not illustrate the actual code line that + failed! It is advised to code carefully here and use + logging and/or inline debugging if unexpected exceptions are + occurring. + + Alternatively, a "chained" style of event handling can be + used, by configuring the handler with the ``retval=True`` + modifier and returning the new exception instance from the + function. In this case, event handling will continue onto the + next handler. The "chained" exception is available using + :attr:`.ExceptionContext.chained_exception`:: + + @event.listens_for(Engine, "handle_error", retval=True) + def handle_exception(context): + if context.chained_exception is not None and \\ + "special" in context.chained_exception.message: + return MySpecialException("failed", + cause=context.chained_exception) + + Handlers that return ``None`` may remain within this chain; the + last non-``None`` return value is the one that continues to be + passed to the next handler. + + When a custom exception is raised or returned, SQLAlchemy raises + this new exception as-is, it is not wrapped by any SQLAlchemy + object. If the exception is not a subclass of + :class:`sqlalchemy.exc.StatementError`, + certain features may not be available; currently this includes + the ORM's feature of adding a detail hint about "autoflush" to + exceptions raised within the autoflush process. + + :param context: an :class:`.ExceptionContext` object. See this + class for details on all available members. + + .. versionadded:: 0.9.7 Added the + :meth:`.ConnectionEvents.handle_error` hook. + + .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is now + invoked when an :class:`.Engine` fails during the initial + call to :meth:`.Engine.connect`, as well as when a + :class:`.Connection` object encounters an error during a + reconnect operation. + + .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is + not fired off when a dialect makes use of the + ``skip_user_error_events`` execution option. This is used + by dialects which intend to catch SQLAlchemy-specific exceptions + within specific operations, such as when the MySQL dialect detects + a table not present within the ``has_table()`` dialect method. + Prior to 1.0.0, code which implements :meth:`.handle_error` needs + to ensure that exceptions thrown in these scenarios are re-raised + without modification. 
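        In the simplest read-only case, the members of
        :class:`.ExceptionContext` can be inspected for logging without
        otherwise affecting exception flow. The following is a minimal
        sketch only; the logger configuration is an assumption and not
        part of this API::

            import logging

            from sqlalchemy import event
            from sqlalchemy.engine import Engine

            log = logging.getLogger("myapp.sql_errors")

            @event.listens_for(Engine, "handle_error")
            def log_sql_errors(context):
                # read-only use: record the failing statement and whether
                # SQLAlchemy considers this a disconnect situation
                log.error(
                    "statement %r with parameters %r failed (disconnect=%s)",
                    context.statement, context.parameters,
                    context.is_disconnect,
                    exc_info=context.original_exception)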
""" @@ -661,8 +799,8 @@ def engine_connect(self, conn, branch): It also differs from the :meth:`.PoolEvents.checkout` event in that it is specific to the :class:`.Connection` object, not the DBAPI connection that :meth:`.PoolEvents.checkout` deals with, although - this DBAPI connection is available here via the :attr:`.Connection.connection` - attribute. But note there can in fact + this DBAPI connection is available here via the + :attr:`.Connection.connection` attribute. But note there can in fact be multiple :meth:`.PoolEvents.checkout` events within the lifespan of a single :class:`.Connection` object, if that :class:`.Connection` is invalidated and re-established. There can also be multiple @@ -681,11 +819,16 @@ def engine_connect(self, conn, branch): .. seealso:: + :ref:`pool_disconnects_pessimistic` - illustrates how to use + :meth:`.ConnectionEvents.engine_connect` + to transparently ensure pooled connections are connected to the + database. + :meth:`.PoolEvents.checkout` the lower-level pool checkout event for an individual DBAPI connection - :meth:`.ConnectionEvents.set_connection_execution_options` - a copy of a - :class:`.Connection` is also made when the + :meth:`.ConnectionEvents.set_connection_execution_options` - a copy + of a :class:`.Connection` is also made when the :meth:`.Connection.execution_options` method is called. """ @@ -739,10 +882,28 @@ def set_engine_execution_options(self, engine, opts): .. seealso:: :meth:`.ConnectionEvents.set_connection_execution_options` - event - which is called when :meth:`.Connection.execution_options` is called. + which is called when :meth:`.Connection.execution_options` is + called. """ + def engine_disposed(self, engine): + """Intercept when the :meth:`.Engine.dispose` method is called. + + The :meth:`.Engine.dispose` method instructs the engine to + "dispose" of it's connection pool (e.g. :class:`.Pool`), and + replaces it with a new one. Disposing of the old pool has the + effect that existing checked-in connections are closed. The new + pool does not establish any new connections until it is first used. + + This event can be used to indicate that resources related to the + :class:`.Engine` should also be cleaned up, keeping in mind that the + :class:`.Engine` can still be used for new requests in which case + it re-acquires connection resources. + + .. versionadded:: 1.0.5 + + """ def begin(self, conn): """Intercept begin() events. @@ -852,10 +1013,10 @@ class DialectEvents(event.Events): :class:`.DialectEvents` hooks should be considered **semi-public** and experimental. - These hooks are not for general use and are only for those situations where - intricate re-statement of DBAPI mechanics must be injected onto an existing - dialect. For general-use statement-interception events, please - use the :class:`.ConnectionEvents` interface. + These hooks are not for general use and are only for those situations + where intricate re-statement of DBAPI mechanics must be injected onto + an existing dialect. For general-use statement-interception events, + please use the :class:`.ConnectionEvents` interface. .. seealso:: @@ -895,6 +1056,23 @@ def _accept_with(cls, target): else: return target + def do_connect(self, dialect, conn_rec, cargs, cparams): + """Receive connection arguments before a connection is made. + + Return a DBAPI connection to halt further events from invoking; + the returned connection will be used. 
+ + Alternatively, the event can manipulate the cargs and/or cparams + collections; cargs will always be a Python list that can be mutated + in-place and cparams a Python dictionary. Return None to + allow control to pass to the next event handler and ultimately + to allow the dialect to connect normally, given the updated + arguments. + + .. versionadded:: 1.0.3 + + """ + def do_executemany(self, cursor, statement, parameters, context): """Receive a cursor to have executemany() called. @@ -921,4 +1099,3 @@ def do_execute(self, cursor, statement, parameters, context): place within the event handler. """ - diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index 68e517e265..2729842293 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -1,5 +1,6 @@ # sqlalchemy/exc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,8 +13,6 @@ """ -import traceback - class SQLAlchemyError(Exception): """Generic error class.""" @@ -26,10 +25,12 @@ class ArgumentError(SQLAlchemyError): """ + class NoSuchModuleError(ArgumentError): """Raised when a dynamically-loaded module (usually a database dialect) of a particular name cannot be located.""" + class NoForeignKeysError(ArgumentError): """Raised when no foreign keys can be located between two selectables during a join.""" @@ -51,8 +52,7 @@ class CircularDependencyError(SQLAlchemyError): or pre-deassociate one of the foreign key constrained values. The ``post_update`` flag described at :ref:`post_update` can resolve this cycle. - * In a :meth:`.MetaData.create_all`, :meth:`.MetaData.drop_all`, - :attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey` + * In a :attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey` or :class:`.ForeignKeyConstraint` objects mutually refer to each other. Apply the ``use_alter=True`` flag to one or both, see :ref:`use_alter`. @@ -60,7 +60,7 @@ class CircularDependencyError(SQLAlchemyError): """ def __init__(self, message, cycles, edges, msg=None): if msg is None: - message += " Cycles: %r all edges: %r" % (cycles, edges) + message += " (%s)" % ", ".join(repr(s) for s in cycles) else: message = msg SQLAlchemyError.__init__(self, message) @@ -69,12 +69,13 @@ def __init__(self, message, cycles, edges, msg=None): def __reduce__(self): return self.__class__, (None, self.cycles, - self.edges, self.args[0]) + self.edges, self.args[0]) class CompileError(SQLAlchemyError): """Raised when an error occurs during SQL compilation""" + class UnsupportedCompilationError(CompileError): """Raised when an operation is not supported by the given compiler. 
@@ -85,8 +86,9 @@ class UnsupportedCompilationError(CompileError): def __init__(self, compiler, element_type): super(UnsupportedCompilationError, self).__init__( - "Compiler %r can't render element of type %s" % - (compiler, element_type)) + "Compiler %r can't render element of type %s" % + (compiler, element_type)) + class IdentifierError(SQLAlchemyError): """Raised when a schema name is beyond the max character limit""" @@ -159,7 +161,7 @@ def __init__(self, message, tname, cname): def __reduce__(self): return self.__class__, (self.args[0], self.table_name, - self.column_name) + self.column_name) class NoSuchTableError(InvalidRequestError): @@ -233,14 +235,16 @@ def __reduce__(self): def __str__(self): from sqlalchemy.sql import util - params_repr = util._repr_params(self.params, 10) + details = [SQLAlchemyError.__str__(self)] + if self.statement: + details.append("[SQL: %r]" % self.statement) + if self.params: + params_repr = util._repr_params(self.params, 10) + details.append("[parameters: %r]" % params_repr) return ' '.join([ - "(%s)" % det for det in self.detail - ] + [ - SQLAlchemyError.__str__(self), - repr(self.statement), repr(params_repr) - ]) + "(%s)" % det for det in self.detail + ] + details) def __unicode__(self): return self.__str__() @@ -271,48 +275,55 @@ class DBAPIError(StatementError): @classmethod def instance(cls, statement, params, - orig, - dbapi_base_err, - connection_invalidated=False): + orig, dbapi_base_err, + connection_invalidated=False, + dialect=None): # Don't ever wrap these, just return them directly as if # DBAPIError didn't exist. - if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)): + if (isinstance(orig, BaseException) and + not isinstance(orig, Exception)) or \ + isinstance(orig, DontWrapMixin): return orig if orig is not None: # not a DBAPI error, statement is present. 
# raise a StatementError if not isinstance(orig, dbapi_base_err) and statement: - msg = traceback.format_exception_only( - orig.__class__, orig)[-1].strip() return StatementError( - "%s (original cause: %s)" % (str(orig), msg), + "(%s.%s) %s" % + (orig.__class__.__module__, orig.__class__.__name__, + orig), statement, params, orig ) - name, glob = orig.__class__.__name__, globals() - if name in glob and issubclass(glob[name], DBAPIError): - cls = glob[name] + glob = globals() + for super_ in orig.__class__.__mro__: + name = super_.__name__ + if dialect: + name = dialect.dbapi_exception_translation_map.get( + name, name) + if name in glob and issubclass(glob[name], DBAPIError): + cls = glob[name] + break return cls(statement, params, orig, connection_invalidated) def __reduce__(self): return self.__class__, (self.statement, self.params, - self.orig, self.connection_invalidated) + self.orig, self.connection_invalidated) def __init__(self, statement, params, orig, connection_invalidated=False): try: text = str(orig) - except (KeyboardInterrupt, SystemExit): - raise except Exception as e: text = 'Error in str() of DB-API-generated exception: ' + str(e) StatementError.__init__( - self, - '(%s) %s' % (orig.__class__.__name__, text), - statement, - params, - orig + self, + '(%s.%s) %s' % ( + orig.__class__.__module__, orig.__class__.__name__, text, ), + statement, + params, + orig ) self.connection_invalidated = connection_invalidated diff --git a/lib/sqlalchemy/ext/__init__.py b/lib/sqlalchemy/ext/__init__.py index 1d77acaa74..1c8a59a18d 100644 --- a/lib/sqlalchemy/ext/__init__.py +++ b/lib/sqlalchemy/ext/__init__.py @@ -1,5 +1,11 @@ # ext/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php + +from .. import util as _sa_util + +_sa_util.dependencies.resolve_all("sqlalchemy.ext") + diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index 045645f866..fdc44f386c 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -1,5 +1,6 @@ # ext/associationproxy.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -76,24 +77,24 @@ def association_proxy(target_collection, attr, **kw): ASSOCIATION_PROXY = util.symbol('ASSOCIATION_PROXY') -"""Symbol indicating an :class:`_InspectionAttr` that's +"""Symbol indicating an :class:`InspectionAttr` that's of type :class:`.AssociationProxy`. - Is assigned to the :attr:`._InspectionAttr.extension_type` + Is assigned to the :attr:`.InspectionAttr.extension_type` attibute. """ -class AssociationProxy(interfaces._InspectionAttr): + +class AssociationProxy(interfaces.InspectionAttrInfo): """A descriptor that presents a read/write view of an object attribute.""" is_attribute = False extension_type = ASSOCIATION_PROXY - def __init__(self, target_collection, attr, creator=None, getset_factory=None, proxy_factory=None, - proxy_bulk_set=None): + proxy_bulk_set=None, info=None): """Construct a new :class:`.AssociationProxy`. 
The :func:`.association_proxy` function is provided as the usual @@ -137,6 +138,11 @@ def __init__(self, target_collection, attr, creator=None, :param proxy_bulk_set: Optional, use with proxy_factory. See the _set() method for details. + :param info: optional, will be assigned to + :attr:`.AssociationProxy.info` if present. + + .. versionadded:: 1.0.9 + """ self.target_collection = target_collection self.value_attr = attr @@ -149,6 +155,8 @@ def __init__(self, target_collection, attr, creator=None, self.key = '_%s_%s_%s' % ( type(self).__name__, target_collection, id(self)) self.collection_class = None + if info: + self.info = info @property def remote_attr(self): @@ -229,7 +237,7 @@ def scalar(self): @util.memoized_property def _value_is_scalar(self): return not self._get_property().\ - mapper.get_property(self.value_attr).uselist + mapper.get_property(self.value_attr).uselist @util.memoized_property def _target_is_object(self): @@ -348,8 +356,8 @@ def _set(self, proxy, values): proxy.update(values) else: raise exc.ArgumentError( - 'no proxy_bulk_set supplied for custom ' - 'collection_class implementation') + 'no proxy_bulk_set supplied for custom ' + 'collection_class implementation') @property def _comparator(self): @@ -364,25 +372,29 @@ def any(self, criterion=None, **kwargs): operators of the underlying proxied attributes. """ - - if self._value_is_scalar: - value_expr = getattr( - self.target_class, self.value_attr).has(criterion, **kwargs) + if self._target_is_object: + if self._value_is_scalar: + value_expr = getattr( + self.target_class, self.value_attr).has( + criterion, **kwargs) + else: + value_expr = getattr( + self.target_class, self.value_attr).any( + criterion, **kwargs) else: - value_expr = getattr( - self.target_class, self.value_attr).any(criterion, **kwargs) + value_expr = criterion # check _value_is_scalar here, otherwise # we're scalar->scalar - call .any() so that # the "can't call any() on a scalar" msg is raised. if self.scalar and not self._value_is_scalar: return self._comparator.has( - value_expr - ) + value_expr + ) else: return self._comparator.any( - value_expr - ) + value_expr + ) def has(self, criterion=None, **kwargs): """Produce a proxied 'has' expression using EXISTS. @@ -396,14 +408,14 @@ def has(self, criterion=None, **kwargs): if self._target_is_object: return self._comparator.has( - getattr(self.target_class, self.value_attr).\ - has(criterion, **kwargs) - ) + getattr(self.target_class, self.value_attr). + has(criterion, **kwargs) + ) else: if criterion is not None or kwargs: raise exc.ArgumentError( - "Non-empty has() not allowed for " - "column-targeted association proxy; use ==") + "Non-empty has() not allowed for " + "column-targeted association proxy; use ==") return self._comparator.has() def contains(self, obj): @@ -428,9 +440,9 @@ def __eq__(self, obj): # is only allowed with a scalar. if obj is None: return or_( - self._comparator.has(**{self.value_attr: obj}), - self._comparator == None - ) + self._comparator.has(**{self.value_attr: obj}), + self._comparator == None + ) else: return self._comparator.has(**{self.value_attr: obj}) @@ -438,7 +450,7 @@ def __ne__(self, obj): # note the has() here will fail for collections; eq_() # is only allowed with a scalar. 
return self._comparator.has( - getattr(self.target_class, self.value_attr) != obj) + getattr(self.target_class, self.value_attr) != obj) class _lazy_collection(object): @@ -450,8 +462,8 @@ def __call__(self): obj = self.ref() if obj is None: raise exc.InvalidRequestError( - "stale association proxy, parent object has gone out of " - "scope") + "stale association proxy, parent object has gone out of " + "scope") return getattr(obj, self.target) def __getstate__(self): @@ -526,7 +538,10 @@ def _set(self, object, value): return self.setter(object, value) def __getitem__(self, index): - return self._get(self.col[index]) + if not isinstance(index, slice): + return self._get(self.col[index]) + else: + return [self._get(member) for member in self.col[index]] def __setitem__(self, index, value): if not isinstance(index, slice): @@ -588,7 +603,7 @@ def __iter__(self): for member in self.col: yield self._get(member) - raise StopIteration + return def append(self, value): item = self._create(value) @@ -697,7 +712,7 @@ def __hash__(self): for func_name, func in list(locals().items()): if (util.callable(func) and func.__name__ == func_name and - not func.__doc__ and hasattr(list, func_name)): + not func.__doc__ and hasattr(list, func_name)): func.__doc__ = getattr(list, func_name).__doc__ del func_name, func @@ -834,8 +849,8 @@ def update(self, *a, **kw): self[k] = v except ValueError: raise ValueError( - "dictionary update sequence " - "requires 2-element tuples") + "dictionary update sequence " + "requires 2-element tuples") for key, value in kw: self[key] = value @@ -848,7 +863,7 @@ def __hash__(self): for func_name, func in list(locals().items()): if (util.callable(func) and func.__name__ == func_name and - not func.__doc__ and hasattr(dict, func_name)): + not func.__doc__ and hasattr(dict, func_name)): func.__doc__ = getattr(dict, func_name).__doc__ del func_name, func @@ -892,7 +907,7 @@ def __iter__(self): """ for member in self.col: yield self._get(member) - raise StopIteration + return def add(self, value): if value not in self: @@ -1048,6 +1063,6 @@ def __hash__(self): for func_name, func in list(locals().items()): if (util.callable(func) and func.__name__ == func_name and - not func.__doc__ and hasattr(set, func_name)): + not func.__doc__ and hasattr(set, func_name)): func.__doc__ = getattr(set, func_name).__doc__ del func_name, func diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py index dfc838da31..023d11ca8e 100644 --- a/lib/sqlalchemy/ext/automap.py +++ b/lib/sqlalchemy/ext/automap.py @@ -1,5 +1,6 @@ # ext/automap.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -10,12 +11,6 @@ .. versionadded:: 0.9.1 Added :mod:`sqlalchemy.ext.automap`. -.. note:: - - The :mod:`sqlalchemy.ext.automap` extension should be considered - **experimental** as of 0.9.1. Featureset and API stability is - not guaranteed at this time. 
- It is hoped that the :class:`.AutomapBase` system provides a quick and modernized solution to the problem that the very famous `SQLSoup `_ @@ -59,28 +54,36 @@ session.add(Address(email_address="foo@bar.com", user=User(name="foo"))) session.commit() - # collection-based relationships are by default named "_collection" + # collection-based relationships are by default named + # "_collection" print (u1.address_collection) Above, calling :meth:`.AutomapBase.prepare` while passing along the :paramref:`.AutomapBase.prepare.reflect` parameter indicates that the :meth:`.MetaData.reflect` method will be called on this declarative base -classes' :class:`.MetaData` collection; then, each viable +classes' :class:`.MetaData` collection; then, each **viable** :class:`.Table` within the :class:`.MetaData` will get a new mapped class generated automatically. The :class:`.ForeignKeyConstraint` objects which link the various tables together will be used to produce new, bidirectional :func:`.relationship` objects between classes. The classes and relationships follow along a default naming scheme that we can customize. At this point, -our basic mapping consisting of related ``User`` and ``Address`` classes is ready -to use in the traditional way. +our basic mapping consisting of related ``User`` and ``Address`` classes is +ready to use in the traditional way. + +.. note:: By **viable**, we mean that for a table to be mapped, it must + specify a primary key. Additionally, if the table is detected as being + a pure association table between two other tables, it will not be directly + mapped and will instead be configured as a many-to-many table between + the mappings for the two referring tables. Generating Mappings from an Existing MetaData ============================================= We can pass a pre-declared :class:`.MetaData` object to :func:`.automap_base`. This object can be constructed in any way, including programmatically, from -a serialized file, or from itself being reflected using :meth:`.MetaData.reflect`. -Below we illustrate a combination of reflection and explicit table declaration:: +a serialized file, or from itself being reflected using +:meth:`.MetaData.reflect`. Below we illustrate a combination of reflection and +explicit table declaration:: from sqlalchemy import create_engine, MetaData, Table, Column, ForeignKey engine = create_engine("sqlite:///mydatabase.db") @@ -105,19 +108,20 @@ Base.prepare() # mapped classes are ready - User, Address, Order = Base.classes.user, Base.classes.address, Base.classes.user_order + User, Address, Order = Base.classes.user, Base.classes.address,\ + Base.classes.user_order -Specifying Classes Explcitly -============================ +Specifying Classes Explicitly +============================= The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined explicitly, in a way similar to that of the :class:`.DeferredReflection` class. Classes that extend from :class:`.AutomapBase` act like regular declarative -classes, but are not immediately mapped after their construction, and are instead -mapped when we call :meth:`.AutomapBase.prepare`. The :meth:`.AutomapBase.prepare` -method will make use of the classes we've established based on the table name -we use. If our schema contains tables ``user`` and ``address``, we can define -one or both of the classes to be used:: +classes, but are not immediately mapped after their construction, and are +instead mapped when we call :meth:`.AutomapBase.prepare`. 
The +:meth:`.AutomapBase.prepare` method will make use of the classes we've +established based on the table name we use. If our schema contains tables +``user`` and ``address``, we can define one or both of the classes to be used:: from sqlalchemy.ext.automap import automap_base from sqlalchemy import create_engine @@ -133,9 +137,9 @@ class User(Base): user_name = Column('name', String) # override relationships too, if desired. - # we must use the same name that automap would use for the relationship, - # and also must refer to the class name that automap will generate - # for "address" + # we must use the same name that automap would use for the + # relationship, and also must refer to the class name that automap will + # generate for "address" address_collection = relationship("address", collection_class=set) # reflect @@ -157,10 +161,10 @@ class User(Base): Above, one of the more intricate details is that we illustrated overriding one of the :func:`.relationship` objects that automap would have created. To do this, we needed to make sure the names match up with what automap -would normally generate, in that the relationship name would be ``User.address_collection`` -and the name of the class referred to, from automap's perspective, is called -``address``, even though we are referring to it as ``Address`` within our usage -of this class. +would normally generate, in that the relationship name would be +``User.address_collection`` and the name of the class referred to, from +automap's perspective, is called ``address``, even though we are referring to +it as ``Address`` within our usage of this class. Overriding Naming Schemes ========================= @@ -184,7 +188,7 @@ def camelize_classname(base, tablename, table): "'words_and_underscores' -> 'WordsAndUnderscores'" return str(tablename[0].upper() + \\ - re.sub(r'_(\w)', lambda m: m.group(1).upper(), tablename[1:])) + re.sub(r'_([a-z])', lambda m: m.group(1).upper(), tablename[1:])) _pluralizer = inflect.engine() def pluralize_collection(base, local_cls, referred_cls, constraint): @@ -192,10 +196,9 @@ def pluralize_collection(base, local_cls, referred_cls, constraint): "'SomeTerm' -> 'some_terms'" referred_name = referred_cls.__name__ - uncamelized = referred_name[0].lower() + \\ - re.sub(r'\W', - lambda m: "_%s" % m.group(0).lower(), - referred_name[1:]) + uncamelized = re.sub(r'[A-Z]', + lambda m: "_%s" % m.group(0).lower(), + referred_name)[1:] pluralized = _pluralizer.plural(uncamelized) return pluralized @@ -211,7 +214,8 @@ def pluralize_collection(base, local_cls, referred_cls, constraint): ) From the above mapping, we would now have classes ``User`` and ``Address``, -where the collection from ``User`` to ``Address`` is called ``User.addresses``:: +where the collection from ``User`` to ``Address`` is called +``User.addresses``:: User, Address = Base.classes.User, Base.classes.Address @@ -222,7 +226,8 @@ def pluralize_collection(base, local_cls, referred_cls, constraint): The vast majority of what automap accomplishes is the generation of :func:`.relationship` structures based on foreign keys. The mechanism -by which this works for many-to-one and one-to-many relationships is as follows: +by which this works for many-to-one and one-to-many relationships is as +follows: 1. A given :class:`.Table`, known to be mapped to a particular class, is examined for :class:`.ForeignKeyConstraint` objects. 
@@ -231,13 +236,32 @@ def pluralize_collection(base, local_cls, referred_cls, constraint): object present is matched up to the class to which it is to be mapped, if any, else it is skipped. -3. As the :class:`.ForeignKeyConstraint` we are examining correponds to a reference - from the immediate mapped class, - the relationship will be set up as a many-to-one referring to the referred class; - a corresponding one-to-many backref will be created on the referred class referring +3. As the :class:`.ForeignKeyConstraint` we are examining corresponds to a + reference from the immediate mapped class, the relationship will be set up + as a many-to-one referring to the referred class; a corresponding + one-to-many backref will be created on the referred class referring to this class. -4. The names of the relationships are determined using the +4. If any of the columns that are part of the :class:`.ForeignKeyConstraint` + are not nullable (e.g. ``nullable=False``), a + :paramref:`~.relationship.cascade` keyword argument + of ``all, delete-orphan`` will be added to the keyword arguments to + be passed to the relationship or backref. If the + :class:`.ForeignKeyConstraint` reports that + :paramref:`.ForeignKeyConstraint.ondelete` + is set to ``CASCADE`` for a not null or ``SET NULL`` for a nullable + set of columns, the option :paramref:`~.relationship.passive_deletes` + flag is set to ``True`` in the set of relationship keyword arguments. + Note that not all backends support reflection of ON DELETE. + + .. versionadded:: 1.0.0 - automap will detect non-nullable foreign key + constraints when producing a one-to-many relationship and establish + a default cascade of ``all, delete-orphan`` if so; additionally, + if the constraint specifies :paramref:`.ForeignKeyConstraint.ondelete` + of ``CASCADE`` for non-nullable or ``SET NULL`` for nullable columns, + the ``passive_deletes=True`` option is also added. + +5. The names of the relationships are determined using the :paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and :paramref:`.AutomapBase.prepare.name_for_collection_relationship` callable functions. It is important to note that the default relationship @@ -246,18 +270,18 @@ def pluralize_collection(base, local_cls, referred_cls, constraint): alternate class naming scheme, that's the name from which the relationship name will be derived. -5. The classes are inspected for an existing mapped property matching these - names. If one is detected on one side, but none on the other side, :class:`.AutomapBase` - attempts to create a relationship on the missing side, then uses the - :paramref:`.relationship.back_populates` parameter in order to point - the new relationship to the other side. +6. The classes are inspected for an existing mapped property matching these + names. If one is detected on one side, but none on the other side, + :class:`.AutomapBase` attempts to create a relationship on the missing side, + then uses the :paramref:`.relationship.back_populates` parameter in order to + point the new relationship to the other side. -6. In the usual case where no relationship is on either side, - :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the "many-to-one" - side and matches it to the other using the :paramref:`.relationship.backref` - parameter. +7. 
In the usual case where no relationship is on either side, + :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the + "many-to-one" side and matches it to the other using the + :paramref:`.relationship.backref` parameter. -7. Production of the :func:`.relationship` and optionally the :func:`.backref` +8. Production of the :func:`.relationship` and optionally the :func:`.backref` is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship` function, which can be supplied by the end-user in order to augment the arguments passed to :func:`.relationship` or :func:`.backref` or to @@ -287,7 +311,7 @@ def _gen_relationship(base, direction, return_fn, # make use of the built-in function to actually return # the result. return generate_relationship(base, direction, return_fn, - attrname, local_cls, referred_cls, **kw) + attrname, local_cls, referred_cls, **kw) from sqlalchemy.ext.automap import automap_base from sqlalchemy import create_engine @@ -306,16 +330,17 @@ def _gen_relationship(base, direction, return_fn, those which contain a ``secondary`` argument. The process for producing these is as follows: -1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint` objects, - before any mapped class has been assigned to it. +1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint` + objects, before any mapped class has been assigned to it. 2. If the table contains two and exactly two :class:`.ForeignKeyConstraint` objects, and all columns within this table are members of these two :class:`.ForeignKeyConstraint` objects, the table is assumed to be a "secondary" table, and will **not be mapped directly**. -3. The two (or one, for self-referential) external tables to which the :class:`.Table` - refers to are matched to the classes to which they will be mapped, if any. +3. The two (or one, for self-referential) external tables to which the + :class:`.Table` refers to are matched to the classes to which they will be + mapped, if any. 4. If mapped classes for both sides are located, a many-to-many bi-directional :func:`.relationship` / :func:`.backref` pair is created between the two @@ -329,8 +354,8 @@ def _gen_relationship(base, direction, return_fn, ------------------------------ :mod:`.sqlalchemy.ext.automap` will not generate any relationships between -two classes that are in an inheritance relationship. That is, with two classes -given as follows:: +two classes that are in an inheritance relationship. That is, with two +classes given as follows:: class Employee(Base): __tablename__ = 'employee' @@ -347,8 +372,8 @@ class Engineer(Employee): 'polymorphic_identity':'engineer', } -The foreign key from ``Engineer`` to ``Employee`` is used not for a relationship, -but to establish joined inheritance between the two classes. +The foreign key from ``Engineer`` to ``Employee`` is used not for a +relationship, but to establish joined inheritance between the two classes. Note that this means automap will not generate *any* relationships for foreign keys that link from a subclass to a superclass. 
If a mapping @@ -372,13 +397,72 @@ class Engineer(Employee): id = Column(Integer, ForeignKey('employee.id'), primary_key=True) favorite_employee_id = Column(Integer, ForeignKey('employee.id')) - favorite_employee = relationship(Employee, foreign_keys=favorite_employee_id) + favorite_employee = relationship(Employee, + foreign_keys=favorite_employee_id) __mapper_args__ = { 'polymorphic_identity':'engineer', 'inherit_condition': id == Employee.id } +Handling Simple Naming Conflicts +-------------------------------- + +In the case of naming conflicts during mapping, override any of +:func:`.classname_for_table`, :func:`.name_for_scalar_relationship`, +and :func:`.name_for_collection_relationship` as needed. For example, if +automap is attempting to name a many-to-one relationship the same as an +existing column, an alternate convention can be conditionally selected. Given +a schema: + +.. sourcecode:: sql + + CREATE TABLE table_a ( + id INTEGER PRIMARY KEY + ); + + CREATE TABLE table_b ( + id INTEGER PRIMARY KEY, + table_a INTEGER, + FOREIGN KEY(table_a) REFERENCES table_a(id) + ); + +The above schema will first automap the ``table_a`` table as a class named +``table_a``; it will then automap a relationship onto the class for ``table_b`` +with the same name as this related class, e.g. ``table_a``. This +relationship name conflicts with the mapping column ``table_b.table_a``, +and will emit an error on mapping. + +We can resolve this conflict by using an underscore as follows:: + + def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): + name = referred_cls.__name__.lower() + local_table = local_cls.__table__ + if name in local_table.columns: + newname = name + "_" + warnings.warn( + "Already detected name %s present. using %s" % + (name, newname)) + return newname + return name + + + Base.prepare(engine, reflect=True, + name_for_scalar_relationship=name_for_scalar_relationship) + +Alternatively, we can change the name on the column side. The columns +that are mapped can be modified using the technique described at +:ref:`mapper_column_distinct_names`, by assigning the column explicitly +to a new name:: + + Base = automap_base() + + class TableB(Base): + __tablename__ = 'table_b' + _table_a = Column('table_a', ForeignKey('table_a.id')) + + Base.prepare(engine, reflect=True) + Using Automap with Explicit Declarations ======================================== @@ -386,8 +470,8 @@ class Engineer(Employee): As noted previously, automap has no dependency on reflection, and can make use of any collection of :class:`.Table` objects within a :class:`.MetaData` collection. From this, it follows that automap can also be used -generate missing relationships given an otherwise complete model that fully defines -table metadata:: +generate missing relationships given an otherwise complete model that fully +defines table metadata:: from sqlalchemy.ext.automap import automap_base from sqlalchemy import Column, Integer, String, ForeignKey @@ -419,12 +503,12 @@ class Address(Base): Above, given mostly complete ``User`` and ``Address`` mappings, the :class:`.ForeignKey` which we defined on ``Address.user_id`` allowed a -bidirectional relationship pair ``Address.user`` and ``User.address_collection`` -to be generated on the mapped classes. +bidirectional relationship pair ``Address.user`` and +``User.address_collection`` to be generated on the mapped classes. 
-Note that when subclassing :class:`.AutomapBase`, the :meth:`.AutomapBase.prepare` -method is required; if not called, the classes we've declared are in an -un-mapped state. +Note that when subclassing :class:`.AutomapBase`, +the :meth:`.AutomapBase.prepare` method is required; if not called, the classes +we've declared are in an un-mapped state. """ @@ -458,15 +542,16 @@ def classname_for_table(base, tablename, table): .. note:: - In Python 2, the string used for the class name **must** be a non-Unicode - object, e.g. a ``str()`` object. The ``.name`` attribute of - :class:`.Table` is typically a Python unicode subclass, so the ``str()`` - function should be applied to this name, after accounting for any non-ASCII - characters. + In Python 2, the string used for the class name **must** be a + non-Unicode object, e.g. a ``str()`` object. The ``.name`` attribute + of :class:`.Table` is typically a Python unicode subclass, so the + ``str()`` function should be applied to this name, after accounting for + any non-ASCII characters. """ return str(tablename) + def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): """Return the attribute name that should be used to refer from one class to another, for a scalar object reference. @@ -491,7 +576,9 @@ class to another, for a scalar object reference. """ return referred_cls.__name__.lower() -def name_for_collection_relationship(base, local_cls, referred_cls, constraint): + +def name_for_collection_relationship( + base, local_cls, referred_cls, constraint): """Return the attribute name that should be used to refer from one class to another, for a collection reference. @@ -500,7 +587,8 @@ class to another, for a collection reference. return referred_cls.__name__.lower() + "_collection" Alternate implementations - can be specified using the :paramref:`.AutomapBase.prepare.name_for_collection_relationship` + can be specified using the + :paramref:`.AutomapBase.prepare.name_for_collection_relationship` parameter. :param base: the :class:`.AutomapBase` class doing the prepare. @@ -515,7 +603,9 @@ class to another, for a collection reference. """ return referred_cls.__name__.lower() + "_collection" -def generate_relationship(base, direction, return_fn, attrname, local_cls, referred_cls, **kw): + +def generate_relationship( + base, direction, return_fn, attrname, local_cls, referred_cls, **kw): """Generate a :func:`.relationship` or :func:`.backref` on behalf of two mapped classes. @@ -534,14 +624,14 @@ def generate_relationship(base, direction, return_fn, attrname, local_cls, refer :param base: the :class:`.AutomapBase` class doing the prepare. :param direction: indicate the "direction" of the relationship; this will - be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOONE`. + be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`. :param return_fn: the function that is used by default to create the - relationship. This will be either :func:`.relationship` or :func:`.backref`. - The :func:`.backref` function's result will be used to produce a new - :func:`.relationship` in a second step, so it is critical that user-defined - implementations correctly differentiate between the two functions, if - a custom relationship function is being used. + relationship. This will be either :func:`.relationship` or + :func:`.backref`. 
The :func:`.backref` function's result will be used to + produce a new :func:`.relationship` in a second step, so it is critical + that user-defined implementations correctly differentiate between the two + functions, if a custom relationship function is being used. :attrname: the attribute name to which this relationship is being assigned. If the value of :paramref:`.generate_relationship.return_fn` is the @@ -551,8 +641,8 @@ def generate_relationship(base, direction, return_fn, attrname, local_cls, refer :param local_cls: the "local" class to which this relationship or backref will be locally present. - :param referred_cls: the "referred" class to which the relationship or backref - refers to. + :param referred_cls: the "referred" class to which the relationship or + backref refers to. :param \**kw: all additional keyword arguments are passed along to the function. @@ -568,6 +658,7 @@ def generate_relationship(base, direction, return_fn, attrname, local_cls, refer else: raise TypeError("Unknown relationship function: %s" % return_fn) + class AutomapBase(object): """Base class for an "automap" schema. @@ -600,44 +691,45 @@ class that is produced by the :func:`.declarative.declarative_base` """ @classmethod - def prepare(cls, - engine=None, - reflect=False, - classname_for_table=classname_for_table, - collection_class=list, - name_for_scalar_relationship=name_for_scalar_relationship, - name_for_collection_relationship=name_for_collection_relationship, - generate_relationship=generate_relationship): - + def prepare( + cls, + engine=None, + reflect=False, + classname_for_table=classname_for_table, + collection_class=list, + name_for_scalar_relationship=name_for_scalar_relationship, + name_for_collection_relationship=name_for_collection_relationship, + generate_relationship=generate_relationship): """Extract mapped classes and relationships from the :class:`.MetaData` and perform mappings. :param engine: an :class:`.Engine` or :class:`.Connection` with which to perform schema reflection, if specified. - If the :paramref:`.AutomapBase.prepare.reflect` argument is False, this - object is not used. + If the :paramref:`.AutomapBase.prepare.reflect` argument is False, + this object is not used. :param reflect: if True, the :meth:`.MetaData.reflect` method is called on the :class:`.MetaData` associated with this :class:`.AutomapBase`. - The :class:`.Engine` passed via :paramref:`.AutomapBase.prepare.engine` will - be used to perform the reflection if present; else, the :class:`.MetaData` - should already be bound to some engine else the operation will fail. + The :class:`.Engine` passed via + :paramref:`.AutomapBase.prepare.engine` will be used to perform the + reflection if present; else, the :class:`.MetaData` should already be + bound to some engine else the operation will fail. :param classname_for_table: callable function which will be used to produce new class names, given a table name. Defaults to :func:`.classname_for_table`. - :param name_for_scalar_relationship: callable function which will be used - to produce relationship names for scalar relationships. Defaults to - :func:`.name_for_scalar_relationship`. + :param name_for_scalar_relationship: callable function which will be + used to produce relationship names for scalar relationships. Defaults + to :func:`.name_for_scalar_relationship`. - :param name_for_collection_relationship: callable function which will be used - to produce relationship names for collection-oriented relationships. 
Defaults to - :func:`.name_for_collection_relationship`. + :param name_for_collection_relationship: callable function which will + be used to produce relationship names for collection-oriented + relationships. Defaults to :func:`.name_for_collection_relationship`. :param generate_relationship: callable function which will be used to - actually generate :func:`.relationship` and :func:`.backref` constructs. - Defaults to :func:`.generate_relationship`. + actually generate :func:`.relationship` and :func:`.backref` + constructs. Defaults to :func:`.generate_relationship`. :param collection_class: the Python collection class that will be used when a new :func:`.relationship` object is created that represents a @@ -646,16 +738,16 @@ def prepare(cls, """ if reflect: cls.metadata.reflect( - engine, - extend_existing=True, - autoload_replace=False - ) + engine, + extend_existing=True, + autoload_replace=False + ) table_to_map_config = dict( - (m.local_table, m) - for m in _DeferredMapperConfig. - classes_for_base(cls, sort=False) - ) + (m.local_table, m) + for m in _DeferredMapperConfig. + classes_for_base(cls, sort=False) + ) many_to_many = [] @@ -677,25 +769,24 @@ def prepare(cls, for map_config in table_to_map_config.values(): _relationships_for_fks(cls, - map_config, - table_to_map_config, - collection_class, - name_for_scalar_relationship, - name_for_collection_relationship, - generate_relationship) + map_config, + table_to_map_config, + collection_class, + name_for_scalar_relationship, + name_for_collection_relationship, + generate_relationship) for lcl_m2m, rem_m2m, m2m_const, table in many_to_many: _m2m_relationship(cls, lcl_m2m, rem_m2m, m2m_const, table, - table_to_map_config, - collection_class, - name_for_scalar_relationship, - name_for_collection_relationship, - generate_relationship) + table_to_map_config, + collection_class, + name_for_scalar_relationship, + name_for_collection_relationship, + generate_relationship) for map_config in _DeferredMapperConfig.classes_for_base(cls): map_config.map() - _sa_decl_prepare = True """Indicate that the mapping of classes should be deferred. @@ -717,6 +808,7 @@ def prepare(cls, """ + def automap_base(declarative_base=None, **kw): """Produce a declarative automap base. @@ -730,8 +822,8 @@ def automap_base(declarative_base=None, **kw): :param declarative_base: an existing class produced by :func:`.declarative.declarative_base`. When this is passed, the function - no longer invokes :func:`.declarative.declarative_base` itself, and all other - keyword arguments are ignored. + no longer invokes :func:`.declarative.declarative_base` itself, and all + other keyword arguments are ignored. :param \**kw: keyword arguments are passed along to :func:`.declarative.declarative_base`. 
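    For example, the following minimal sketch shows both forms of use;
    ``MyBase`` and ``AutoBase`` are illustrative names only and not part
    of this API::

        from sqlalchemy.ext.automap import automap_base
        from sqlalchemy.ext.declarative import declarative_base

        # build a fresh automap base...
        Base = automap_base()

        # ...or layer automap on top of an existing declarative base
        MyBase = declarative_base()
        AutoBase = automap_base(declarative_base=MyBase)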
@@ -743,20 +835,21 @@ def automap_base(declarative_base=None, **kw): Base = declarative_base return type( - Base.__name__, - (AutomapBase, Base,), - {"__abstract__": True, "classes": util.Properties({})} - ) + Base.__name__, + (AutomapBase, Base,), + {"__abstract__": True, "classes": util.Properties({})} + ) + def _is_many_to_many(automap_base, table): fk_constraints = [const for const in table.constraints - if isinstance(const, ForeignKeyConstraint)] + if isinstance(const, ForeignKeyConstraint)] if len(fk_constraints) != 2: return None, None, None cols = sum( - [[fk.parent for fk in fk_constraint.elements] - for fk_constraint in fk_constraints], []) + [[fk.parent for fk in fk_constraint.elements] + for fk_constraint in fk_constraints], []) if set(cols) != set(table.c): return None, None, None @@ -767,11 +860,12 @@ def _is_many_to_many(automap_base, table): fk_constraints ) + def _relationships_for_fks(automap_base, map_config, table_to_map_config, - collection_class, - name_for_scalar_relationship, - name_for_collection_relationship, - generate_relationship): + collection_class, + name_for_scalar_relationship, + name_for_collection_relationship, + generate_relationship): local_table = map_config.local_table local_cls = map_config.cls @@ -786,62 +880,88 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config, continue referred_cls = referred_cfg.cls - if local_cls is not referred_cls and issubclass(local_cls, referred_cls): + if local_cls is not referred_cls and issubclass( + local_cls, referred_cls): continue relationship_name = name_for_scalar_relationship( - automap_base, - local_cls, - referred_cls, constraint) + automap_base, + local_cls, + referred_cls, constraint) backref_name = name_for_collection_relationship( - automap_base, - referred_cls, - local_cls, - constraint - ) + automap_base, + referred_cls, + local_cls, + constraint + ) + + o2m_kws = {} + nullable = False not in set([fk.parent.nullable for fk in fks]) + if not nullable: + o2m_kws['cascade'] = "all, delete-orphan" + + if constraint.ondelete and \ + constraint.ondelete.lower() == "cascade": + o2m_kws['passive_deletes'] = True + else: + if constraint.ondelete and \ + constraint.ondelete.lower() == "set null": + o2m_kws['passive_deletes'] = True create_backref = backref_name not in referred_cfg.properties if relationship_name not in map_config.properties: if create_backref: - backref_obj = generate_relationship(automap_base, - interfaces.ONETOMANY, backref, - backref_name, referred_cls, local_cls, - collection_class=collection_class) + backref_obj = generate_relationship( + automap_base, + interfaces.ONETOMANY, backref, + backref_name, referred_cls, local_cls, + collection_class=collection_class, + **o2m_kws) else: backref_obj = None rel = generate_relationship(automap_base, - interfaces.MANYTOONE, - relationship, - relationship_name, - local_cls, referred_cls, - foreign_keys=[fk.parent for fk in constraint.elements], - backref=backref_obj, - remote_side=[fk.column for fk in constraint.elements] - ) + interfaces.MANYTOONE, + relationship, + relationship_name, + local_cls, referred_cls, + foreign_keys=[ + fk.parent + for fk in constraint.elements], + backref=backref_obj, + remote_side=[ + fk.column + for fk in constraint.elements] + ) if rel is not None: map_config.properties[relationship_name] = rel if not create_backref: - referred_cfg.properties[backref_name].back_populates = relationship_name + referred_cfg.properties[ + backref_name].back_populates = relationship_name elif create_backref: rel = 
generate_relationship(automap_base, - interfaces.ONETOMANY, - relationship, - backref_name, - referred_cls, local_cls, - foreign_keys=[fk.parent for fk in constraint.elements], - back_populates=relationship_name, - collection_class=collection_class) + interfaces.ONETOMANY, + relationship, + backref_name, + referred_cls, local_cls, + foreign_keys=[ + fk.parent + for fk in constraint.elements], + back_populates=relationship_name, + collection_class=collection_class, + **o2m_kws) if rel is not None: referred_cfg.properties[backref_name] = rel - map_config.properties[relationship_name].back_populates = backref_name + map_config.properties[ + relationship_name].back_populates = backref_name + def _m2m_relationship(automap_base, lcl_m2m, rem_m2m, m2m_const, table, - table_to_map_config, - collection_class, - name_for_scalar_relationship, - name_for_collection_relationship, - generate_relationship): + table_to_map_config, + collection_class, + name_for_scalar_relationship, + name_for_collection_relationship, + generate_relationship): map_config = table_to_map_config.get(lcl_m2m, None) referred_cfg = table_to_map_config.get(rem_m2m, None) @@ -852,56 +972,67 @@ def _m2m_relationship(automap_base, lcl_m2m, rem_m2m, m2m_const, table, referred_cls = referred_cfg.cls relationship_name = name_for_collection_relationship( - automap_base, - local_cls, - referred_cls, m2m_const[0]) + automap_base, + local_cls, + referred_cls, m2m_const[0]) backref_name = name_for_collection_relationship( - automap_base, - referred_cls, - local_cls, - m2m_const[1] - ) + automap_base, + referred_cls, + local_cls, + m2m_const[1] + ) create_backref = backref_name not in referred_cfg.properties if relationship_name not in map_config.properties: if create_backref: - backref_obj = generate_relationship(automap_base, - interfaces.MANYTOMANY, - backref, - backref_name, - referred_cls, local_cls, - collection_class=collection_class - ) + backref_obj = generate_relationship( + automap_base, + interfaces.MANYTOMANY, + backref, + backref_name, + referred_cls, local_cls, + collection_class=collection_class + ) else: backref_obj = None rel = generate_relationship(automap_base, - interfaces.MANYTOMANY, - relationship, - relationship_name, - local_cls, referred_cls, - secondary=table, - primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements), - secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements), - backref=backref_obj, - collection_class=collection_class - ) + interfaces.MANYTOMANY, + relationship, + relationship_name, + local_cls, referred_cls, + secondary=table, + primaryjoin=and_( + fk.column == fk.parent + for fk in m2m_const[0].elements), + secondaryjoin=and_( + fk.column == fk.parent + for fk in m2m_const[1].elements), + backref=backref_obj, + collection_class=collection_class + ) if rel is not None: map_config.properties[relationship_name] = rel if not create_backref: - referred_cfg.properties[backref_name].back_populates = relationship_name + referred_cfg.properties[ + backref_name].back_populates = relationship_name elif create_backref: rel = generate_relationship(automap_base, - interfaces.MANYTOMANY, - relationship, - backref_name, - referred_cls, local_cls, - secondary=table, - primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements), - secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements), - back_populates=relationship_name, - collection_class=collection_class) + interfaces.MANYTOMANY, + relationship, + backref_name, + referred_cls, local_cls, + 
secondary=table, + primaryjoin=and_( + fk.column == fk.parent + for fk in m2m_const[1].elements), + secondaryjoin=and_( + fk.column == fk.parent + for fk in m2m_const[0].elements), + back_populates=relationship_name, + collection_class=collection_class) if rel is not None: referred_cfg.properties[backref_name] = rel - map_config.properties[relationship_name].back_populates = backref_name + map_config.properties[ + relationship_name].back_populates = backref_name diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py new file mode 100644 index 0000000000..2504be9ddf --- /dev/null +++ b/lib/sqlalchemy/ext/baked.py @@ -0,0 +1,523 @@ +# sqlalchemy/ext/baked.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +"""Baked query extension. + +Provides a creational pattern for the :class:`.query.Query` object which +allows the fully constructed object, Core select statement, and string +compiled result to be fully cached. + + +""" + +from ..orm.query import Query +from ..orm import strategies, attributes, properties, \ + strategy_options, util as orm_util, interfaces +from .. import log as sqla_log +from ..sql import util as sql_util +from ..orm import exc as orm_exc +from .. import exc as sa_exc +from .. import util + +import copy +import logging + +log = logging.getLogger(__name__) + + +class BakedQuery(object): + """A builder object for :class:`.query.Query` objects.""" + + __slots__ = 'steps', '_bakery', '_cache_key', '_spoiled' + + def __init__(self, bakery, initial_fn, args=()): + self._cache_key = () + self._update_cache_key(initial_fn, args) + self.steps = [initial_fn] + self._spoiled = False + self._bakery = bakery + + @classmethod + def bakery(cls, size=200): + """Construct a new bakery.""" + + _bakery = util.LRUCache(size) + + def call(initial_fn, *args): + return cls(_bakery, initial_fn, args) + + return call + + def _clone(self): + b1 = BakedQuery.__new__(BakedQuery) + b1._cache_key = self._cache_key + b1.steps = list(self.steps) + b1._bakery = self._bakery + b1._spoiled = self._spoiled + return b1 + + def _update_cache_key(self, fn, args=()): + self._cache_key += (fn.__code__,) + args + + def __iadd__(self, other): + if isinstance(other, tuple): + self.add_criteria(*other) + else: + self.add_criteria(other) + return self + + def __add__(self, other): + if isinstance(other, tuple): + return self.with_criteria(*other) + else: + return self.with_criteria(other) + + def add_criteria(self, fn, *args): + """Add a criteria function to this :class:`.BakedQuery`. + + This is equivalent to using the ``+=`` operator to + modify a :class:`.BakedQuery` in-place. + + """ + self._update_cache_key(fn, args) + self.steps.append(fn) + return self + + def with_criteria(self, fn, *args): + """Add a criteria function to a :class:`.BakedQuery` cloned from this one. + + This is equivalent to using the ``+`` operator to + produce a new :class:`.BakedQuery` with modifications. + + """ + return self._clone().add_criteria(fn, *args) + + def for_session(self, session): + """Return a :class:`.Result` object for this :class:`.BakedQuery`. + + This is equivalent to calling the :class:`.BakedQuery` as a + Python callable, e.g. ``result = my_baked_query(session)``. 
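+        A short end-to-end sketch follows; the ``User`` mapped class and
+        the session setup are assumptions for illustration::
+
+            from sqlalchemy import bindparam
+            from sqlalchemy.ext import baked
+
+            # a bakery acts as the cache of constructed query state
+            bakery = baked.BakedQuery.bakery()
+
+            def search_for_user(session, username):
+                baked_query = bakery(lambda session: session.query(User))
+                baked_query += lambda q: q.filter(
+                    User.name == bindparam('username'))
+
+                result = baked_query(session).params(username=username)
+                return result.all()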
+ + """ + return Result(self, session) + + def __call__(self, session): + return self.for_session(session) + + def spoil(self, full=False): + """Cancel any query caching that will occur on this BakedQuery object. + + The BakedQuery can continue to be used normally, however additional + creational functions will not be cached; they will be called + on every invocation. + + This is to support the case where a particular step in constructing + a baked query disqualifies the query from being cacheable, such + as a variant that relies upon some uncacheable value. + + :param full: if False, only functions added to this + :class:`.BakedQuery` object subsequent to the spoil step will be + non-cached; the state of the :class:`.BakedQuery` up until + this point will be pulled from the cache. If True, then the + entire :class:`.Query` object is built from scratch each + time, with all creational functions being called on each + invocation. + + """ + if not full: + _spoil_point = self._clone() + _spoil_point._cache_key += ('_query_only', ) + self.steps = [_spoil_point._retrieve_baked_query] + self._spoiled = True + return self + + def _retrieve_baked_query(self, session): + query = self._bakery.get(self._cache_key, None) + if query is None: + query = self._as_query(session) + self._bakery[self._cache_key] = query.with_session(None) + return query.with_session(session) + + def _bake(self, session): + query = self._as_query(session) + + context = query._compile_context() + self._bake_subquery_loaders(session, context) + context.session = None + context.query = query = context.query.with_session(None) + query._execution_options = query._execution_options.union( + {"compiled_cache": self._bakery} + ) + # we'll be holding onto the query for some of its state, + # so delete some compilation-use-only attributes that can take up + # space + for attr in ( + '_correlate', '_from_obj', '_mapper_adapter_map', + '_joinpath', '_joinpoint'): + query.__dict__.pop(attr, None) + self._bakery[self._cache_key] = context + return context + + def _as_query(self, session): + query = self.steps[0](session) + + for step in self.steps[1:]: + query = step(query) + return query + + def _bake_subquery_loaders(self, session, context): + """convert subquery eager loaders in the cache into baked queries. + + For subquery eager loading to work, all we need here is that the + Query point to the correct session when it is run. However, since + we are "baking" anyway, we may as well also turn the query into + a "baked" query so that we save on performance too. + + """ + context.attributes['baked_queries'] = baked_queries = [] + for k, v in list(context.attributes.items()): + if isinstance(v, Query): + if 'subquery' in k: + bk = BakedQuery(self._bakery, lambda *args: v) + bk._cache_key = self._cache_key + k + bk._bake(session) + baked_queries.append((k, bk._cache_key, v)) + del context.attributes[k] + + def _unbake_subquery_loaders(self, session, context, params): + """Retrieve subquery eager loaders stored by _bake_subquery_loaders + and turn them back into Result objects that will iterate just + like a Query object. + + """ + for k, cache_key, query in context.attributes["baked_queries"]: + bk = BakedQuery(self._bakery, lambda sess: query.with_session(sess)) + bk._cache_key = cache_key + context.attributes[k] = bk.for_session(session).params(**params) + + +class Result(object): + """Invokes a :class:`.BakedQuery` against a :class:`.Session`. 
+ + The :class:`.Result` object is where the actual :class:`.query.Query` + object gets created, or retrieved from the cache, + against a target :class:`.Session`, and is then invoked for results. + + """ + __slots__ = 'bq', 'session', '_params' + + def __init__(self, bq, session): + self.bq = bq + self.session = session + self._params = {} + + def params(self, *args, **kw): + """Specify parameters to be replaced into the string SQL statement.""" + + if len(args) == 1: + kw.update(args[0]) + elif len(args) > 0: + raise sa_exc.ArgumentError( + "params() takes zero or one positional argument, " + "which is a dictionary.") + self._params.update(kw) + return self + + def _as_query(self): + return self.bq._as_query(self.session).params(self._params) + + def __str__(self): + return str(self._as_query()) + + def __iter__(self): + bq = self.bq + if bq._spoiled: + return iter(self._as_query()) + + baked_context = bq._bakery.get(bq._cache_key, None) + if baked_context is None: + baked_context = bq._bake(self.session) + + context = copy.copy(baked_context) + context.session = self.session + context.attributes = context.attributes.copy() + + bq._unbake_subquery_loaders(self.session, context, self._params) + + context.statement.use_labels = True + if context.autoflush and not context.populate_existing: + self.session._autoflush() + return context.query.params(self._params).\ + with_session(self.session)._execute_and_instances(context) + + def first(self): + """Return the first row. + + Equivalent to :meth:`.Query.first`. + + """ + bq = self.bq.with_criteria(lambda q: q.slice(0, 1)) + ret = list(bq.for_session(self.session).params(self._params)) + if len(ret) > 0: + return ret[0] + else: + return None + + def one(self): + """Return exactly one result or raise an exception. + + Equivalent to :meth:`.Query.one`. + + """ + ret = list(self) + + l = len(ret) + if l == 1: + return ret[0] + elif l == 0: + raise orm_exc.NoResultFound("No row was found for one()") + else: + raise orm_exc.MultipleResultsFound( + "Multiple rows were found for one()") + + def one_or_none(self): + """Return one or zero results, or raise an exception for multiple + rows. + + Equivalent to :meth:`.Query.one_or_none`. + + .. versionadded:: 1.0.9 + + """ + ret = list(self) + + l = len(ret) + if l == 1: + return ret[0] + elif l == 0: + return None + else: + raise orm_exc.MultipleResultsFound( + "Multiple rows were found for one_or_none()") + + def all(self): + """Return all rows. + + Equivalent to :meth:`.Query.all`. + + """ + return list(self) + + def get(self, ident): + """Retrieve an object based on identity. + + Equivalent to :meth:`.Query.get`. 
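+
+        A short sketch, assuming a hypothetical mapped class ``User`` and a
+        bakery created via ``bakery = baked.bakery()``::
+
+            bq = bakery(lambda s: s.query(User))
+            user = bq(session).get(5)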
+ + """ + + query = self.bq.steps[0](self.session) + return query._get_impl(ident, self._load_on_ident) + + def _load_on_ident(self, query, key): + """Load the given identity key from the database.""" + + ident = key[1] + + mapper = query._mapper_zero() + + _get_clause, _get_params = mapper._get_clause + + def setup(query): + _lcl_get_clause = _get_clause + q = query._clone() + q._get_condition() + q._order_by = None + + # None present in ident - turn those comparisons + # into "IS NULL" + if None in ident: + nones = set([ + _get_params[col].key for col, value in + zip(mapper.primary_key, ident) if value is None + ]) + _lcl_get_clause = sql_util.adapt_criterion_to_null( + _lcl_get_clause, nones) + + _lcl_get_clause = q._adapt_clause(_lcl_get_clause, True, False) + q._criterion = _lcl_get_clause + return q + + # cache the query against a key that includes + # which positions in the primary key are NULL + # (remember, we can map to an OUTER JOIN) + bq = self.bq + + # add the clause we got from mapper._get_clause to the cache + # key so that if a race causes multiple calls to _get_clause, + # we've cached on ours + bq = bq._clone() + bq._cache_key += (_get_clause, ) + + bq = bq.with_criteria(setup, tuple(elem is None for elem in ident)) + + params = dict([ + (_get_params[primary_key].key, id_val) + for id_val, primary_key in zip(ident, mapper.primary_key) + ]) + + result = list(bq.for_session(self.session).params(**params)) + l = len(result) + if l > 1: + raise orm_exc.MultipleResultsFound() + elif l: + return result[0] + else: + return None + + +def bake_lazy_loaders(): + """Enable the use of baked queries for all lazyloaders systemwide. + + This operation should be safe for all lazy loaders, and will reduce + Python overhead for these operations. + + """ + BakedLazyLoader._strategy_keys[:] = [] + + properties.RelationshipProperty.strategy_for( + lazy="select")(BakedLazyLoader) + properties.RelationshipProperty.strategy_for( + lazy=True)(BakedLazyLoader) + properties.RelationshipProperty.strategy_for( + lazy="baked_select")(BakedLazyLoader) + + strategies.LazyLoader._strategy_keys[:] = BakedLazyLoader._strategy_keys[:] + + +def unbake_lazy_loaders(): + """Disable the use of baked queries for all lazyloaders systemwide. + + This operation reverts the changes produced by :func:`.bake_lazy_loaders`. 
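+
+    A minimal sketch of toggling the behavior systemwide::
+
+        from sqlalchemy.ext import baked
+
+        baked.bake_lazy_loaders()
+        # ... lazy loads now use baked queries ...
+        baked.unbake_lazy_loaders()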
+ + """ + strategies.LazyLoader._strategy_keys[:] = [] + BakedLazyLoader._strategy_keys[:] = [] + + properties.RelationshipProperty.strategy_for( + lazy="select")(strategies.LazyLoader) + properties.RelationshipProperty.strategy_for( + lazy=True)(strategies.LazyLoader) + properties.RelationshipProperty.strategy_for( + lazy="baked_select")(BakedLazyLoader) + assert strategies.LazyLoader._strategy_keys + + +@sqla_log.class_logger +@properties.RelationshipProperty.strategy_for(lazy="baked_select") +class BakedLazyLoader(strategies.LazyLoader): + + def _emit_lazyload(self, session, state, ident_key, passive): + q = BakedQuery( + self.mapper._compiled_cache, + lambda session: session.query(self.mapper)) + q.add_criteria( + lambda q: q._adapt_all_clauses()._with_invoke_all_eagers(False), + self.parent_property) + + if not self.parent_property.bake_queries: + q.spoil(full=True) + + if self.parent_property.secondary is not None: + q.add_criteria( + lambda q: + q.select_from(self.mapper, self.parent_property.secondary)) + + pending = not state.key + + # don't autoflush on pending + if pending or passive & attributes.NO_AUTOFLUSH: + q.add_criteria(lambda q: q.autoflush(False)) + + if state.load_path: + q.spoil() + q.add_criteria( + lambda q: + q._with_current_path(state.load_path[self.parent_property])) + + if state.load_options: + q.spoil() + q.add_criteria( + lambda q: q._conditional_options(*state.load_options)) + + if self.use_get: + return q(session)._load_on_ident( + session.query(self.mapper), ident_key) + + if self.parent_property.order_by: + q.add_criteria( + lambda q: + q.order_by(*util.to_list(self.parent_property.order_by))) + + for rev in self.parent_property._reverse_property: + # reverse props that are MANYTOONE are loading *this* + # object from get(), so don't need to eager out to those. + if rev.direction is interfaces.MANYTOONE and \ + rev._use_get and \ + not isinstance(rev.strategy, strategies.LazyLoader): + q.add_criteria( + lambda q: + q.options( + strategy_options.Load( + rev.parent).baked_lazyload(rev.key))) + + lazy_clause, params = self._generate_lazy_clause(state, passive) + + if pending: + if orm_util._none_set.intersection(params.values()): + return None + + q.add_criteria(lambda q: q.filter(lazy_clause)) + result = q(session).params(**params).all() + if self.uselist: + return result + else: + l = len(result) + if l: + if l > 1: + util.warn( + "Multiple rows returned with " + "uselist=False for lazily-loaded attribute '%s' " + % self.parent_property) + + return result[0] + else: + return None + + +@strategy_options.loader_option() +def baked_lazyload(loadopt, attr): + """Indicate that the given attribute should be loaded using "lazy" + loading with a "baked" query used in the load. 
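+
+    A usage sketch, assuming hypothetical mapped classes ``User`` and
+    ``Address`` linked by a ``User.addresses`` relationship::
+
+        from sqlalchemy.ext.baked import baked_lazyload
+
+        session.query(User).options(baked_lazyload(User.addresses))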
+ + """ + return loadopt.set_relationship_strategy(attr, {"lazy": "baked_select"}) + + +@baked_lazyload._add_unbound_fn +def baked_lazyload(*keys): + return strategy_options._UnboundLoad._from_keys( + strategy_options._UnboundLoad.baked_lazyload, keys, False, {}) + + +@baked_lazyload._add_unbound_all_fn +def baked_lazyload_all(*keys): + return strategy_options._UnboundLoad._from_keys( + strategy_options._UnboundLoad.baked_lazyload, keys, True, {}) + +baked_lazyload = baked_lazyload._unbound_fn +baked_lazyload_all = baked_lazyload_all._unbound_all_fn + +bakery = BakedQuery.bakery diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py index 5dde74e098..86156be1fb 100644 --- a/lib/sqlalchemy/ext/compiler.py +++ b/lib/sqlalchemy/ext/compiler.py @@ -1,5 +1,6 @@ # ext/compiler.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -57,7 +58,8 @@ def visit_alter_column(element, compiler, **kw): @compiles(AlterColumn, 'postgresql') def visit_alter_column(element, compiler, **kw): - return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, element.column.name) + return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, + element.column.name) The second ``visit_alter_table`` will be invoked when any ``postgresql`` dialect is used. @@ -92,7 +94,8 @@ def visit_insert_from_select(element, compiler, **kw): Produces:: - "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z FROM mytable WHERE mytable.x > :x_1)" + "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z + FROM mytable WHERE mytable.x > :x_1)" .. note:: @@ -118,9 +121,19 @@ def visit_insert_from_select(element, compiler, **kw): def compile_my_constraint(constraint, ddlcompiler, **kw): return "CONSTRAINT %s CHECK (%s)" % ( constraint.name, - ddlcompiler.sql_compiler.process(constraint.expression) + ddlcompiler.sql_compiler.process( + constraint.expression, literal_binds=True) ) +Above, we add an additional flag to the process step as called by +:meth:`.SQLCompiler.process`, which is the ``literal_binds`` flag. This +indicates that any SQL expression which refers to a :class:`.BindParameter` +object or other "literal" object such as those which refer to strings or +integers should be rendered **in-place**, rather than being referred to as +a bound parameter; when emitting DDL, bound parameters are typically not +supported. + + .. _enabling_compiled_autocommit: Enabling Autocommit on a Construct @@ -319,7 +332,7 @@ def ms_utcnow(element, compiler, **kw): ------------------- The "GREATEST" function is given any number of arguments and returns the one -that is of the highest value - it's equivalent to Python's ``max`` +that is of the highest value - its equivalent to Python's ``max`` function. A SQL standard version versus a CASE based version which only accommodates two arguments:: @@ -407,7 +420,7 @@ def decorate(fn): # TODO: why is the lambda needed ? setattr(class_, '_compiler_dispatch', - lambda *arg, **kw: existing(*arg, **kw)) + lambda *arg, **kw: existing(*arg, **kw)) setattr(class_, '_compiler_dispatcher', existing) if specs: @@ -443,6 +456,6 @@ def __call__(self, element, compiler, **kw): fn = self.specs['default'] except KeyError: raise exc.CompileError( - "%s construct has no default " - "compilation handler." 
% type(element)) + "%s construct has no default " + "compilation handler." % type(element)) return fn(element, compiler, **kw) diff --git a/lib/sqlalchemy/ext/declarative/__init__.py b/lib/sqlalchemy/ext/declarative/__init__.py index 4010789b6b..f96a40252c 100644 --- a/lib/sqlalchemy/ext/declarative/__init__.py +++ b/lib/sqlalchemy/ext/declarative/__init__.py @@ -1,1309 +1,10 @@ # ext/declarative/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -""" -Synopsis -======== - -SQLAlchemy object-relational configuration involves the -combination of :class:`.Table`, :func:`.mapper`, and class -objects to define a mapped class. -:mod:`~sqlalchemy.ext.declarative` allows all three to be -expressed at once within the class declaration. As much as -possible, regular SQLAlchemy schema and ORM constructs are -used directly, so that configuration between "classical" ORM -usage and declarative remain highly similar. - -As a simple example:: - - from sqlalchemy.ext.declarative import declarative_base - - Base = declarative_base() - - class SomeClass(Base): - __tablename__ = 'some_table' - id = Column(Integer, primary_key=True) - name = Column(String(50)) - -Above, the :func:`declarative_base` callable returns a new base class from -which all mapped classes should inherit. When the class definition is -completed, a new :class:`.Table` and :func:`.mapper` will have been generated. - -The resulting table and mapper are accessible via -``__table__`` and ``__mapper__`` attributes on the -``SomeClass`` class:: - - # access the mapped Table - SomeClass.__table__ - - # access the Mapper - SomeClass.__mapper__ - -Defining Attributes -=================== - -In the previous example, the :class:`.Column` objects are -automatically named with the name of the attribute to which they are -assigned. - -To name columns explicitly with a name distinct from their mapped attribute, -just give the column a name. Below, column "some_table_id" is mapped to the -"id" attribute of `SomeClass`, but in SQL will be represented as -"some_table_id":: - - class SomeClass(Base): - __tablename__ = 'some_table' - id = Column("some_table_id", Integer, primary_key=True) - -Attributes may be added to the class after its construction, and they will be -added to the underlying :class:`.Table` and -:func:`.mapper` definitions as appropriate:: - - SomeClass.data = Column('data', Unicode) - SomeClass.related = relationship(RelatedInfo) - -Classes which are constructed using declarative can interact freely -with classes that are mapped explicitly with :func:`.mapper`. - -It is recommended, though not required, that all tables -share the same underlying :class:`~sqlalchemy.schema.MetaData` object, -so that string-configured :class:`~sqlalchemy.schema.ForeignKey` -references can be resolved without issue. - -Accessing the MetaData -======================= - -The :func:`declarative_base` base class contains a -:class:`.MetaData` object where newly defined -:class:`.Table` objects are collected. This object is -intended to be accessed directly for -:class:`.MetaData`-specific operations. 
Such as, to issue -CREATE statements for all tables:: - - engine = create_engine('sqlite://') - Base.metadata.create_all(engine) - -:func:`declarative_base` can also receive a pre-existing -:class:`.MetaData` object, which allows a -declarative setup to be associated with an already -existing traditional collection of :class:`~sqlalchemy.schema.Table` -objects:: - - mymetadata = MetaData() - Base = declarative_base(metadata=mymetadata) - - -.. _declarative_configuring_relationships: - -Configuring Relationships -========================= - -Relationships to other classes are done in the usual way, with the added -feature that the class specified to :func:`~sqlalchemy.orm.relationship` -may be a string name. The "class registry" associated with ``Base`` -is used at mapper compilation time to resolve the name into the actual -class object, which is expected to have been defined once the mapper -configuration is used:: - - class User(Base): - __tablename__ = 'users' - - id = Column(Integer, primary_key=True) - name = Column(String(50)) - addresses = relationship("Address", backref="user") - - class Address(Base): - __tablename__ = 'addresses' - - id = Column(Integer, primary_key=True) - email = Column(String(50)) - user_id = Column(Integer, ForeignKey('users.id')) - -Column constructs, since they are just that, are immediately usable, -as below where we define a primary join condition on the ``Address`` -class using them:: - - class Address(Base): - __tablename__ = 'addresses' - - id = Column(Integer, primary_key=True) - email = Column(String(50)) - user_id = Column(Integer, ForeignKey('users.id')) - user = relationship(User, primaryjoin=user_id == User.id) - -In addition to the main argument for :func:`~sqlalchemy.orm.relationship`, -other arguments which depend upon the columns present on an as-yet -undefined class may also be specified as strings. These strings are -evaluated as Python expressions. The full namespace available within -this evaluation includes all classes mapped for this declarative base, -as well as the contents of the ``sqlalchemy`` package, including -expression functions like :func:`~sqlalchemy.sql.expression.desc` and -:attr:`~sqlalchemy.sql.expression.func`:: - - class User(Base): - # .... - addresses = relationship("Address", - order_by="desc(Address.email)", - primaryjoin="Address.user_id==User.id") - -For the case where more than one module contains a class of the same name, -string class names can also be specified as module-qualified paths -within any of these string expressions:: - - class User(Base): - # .... - addresses = relationship("myapp.model.address.Address", - order_by="desc(myapp.model.address.Address.email)", - primaryjoin="myapp.model.address.Address.user_id==" - "myapp.model.user.User.id") - -The qualified path can be any partial path that removes ambiguity between -the names. For example, to disambiguate between -``myapp.model.address.Address`` and ``myapp.model.lookup.Address``, -we can specify ``address.Address`` or ``lookup.Address``:: - - class User(Base): - # .... - addresses = relationship("address.Address", - order_by="desc(address.Address.email)", - primaryjoin="address.Address.user_id==" - "User.id") - -.. versionadded:: 0.8 - module-qualified paths can be used when specifying string arguments - with Declarative, in order to specify specific modules. - -Two alternatives also exist to using string-based attributes. A lambda -can also be used, which will be evaluated after all mappers have been -configured:: - - class User(Base): - # ... 
- addresses = relationship(lambda: Address, - order_by=lambda: desc(Address.email), - primaryjoin=lambda: Address.user_id==User.id) - -Or, the relationship can be added to the class explicitly after the classes -are available:: - - User.addresses = relationship(Address, - primaryjoin=Address.user_id==User.id) - - - -.. _declarative_many_to_many: - -Configuring Many-to-Many Relationships -====================================== - -Many-to-many relationships are also declared in the same way -with declarative as with traditional mappings. The -``secondary`` argument to -:func:`.relationship` is as usual passed a -:class:`.Table` object, which is typically declared in the -traditional way. The :class:`.Table` usually shares -the :class:`.MetaData` object used by the declarative base:: - - keywords = Table( - 'keywords', Base.metadata, - Column('author_id', Integer, ForeignKey('authors.id')), - Column('keyword_id', Integer, ForeignKey('keywords.id')) - ) - - class Author(Base): - __tablename__ = 'authors' - id = Column(Integer, primary_key=True) - keywords = relationship("Keyword", secondary=keywords) - -Like other :func:`~sqlalchemy.orm.relationship` arguments, a string is accepted -as well, passing the string name of the table as defined in the -``Base.metadata.tables`` collection:: - - class Author(Base): - __tablename__ = 'authors' - id = Column(Integer, primary_key=True) - keywords = relationship("Keyword", secondary="keywords") - -As with traditional mapping, its generally not a good idea to use -a :class:`.Table` as the "secondary" argument which is also mapped to -a class, unless the :func:`.relationship` is declared with ``viewonly=True``. -Otherwise, the unit-of-work system may attempt duplicate INSERT and -DELETE statements against the underlying table. - -.. _declarative_sql_expressions: - -Defining SQL Expressions -======================== - -See :ref:`mapper_sql_expressions` for examples on declaratively -mapping attributes to SQL expressions. - -.. _declarative_table_args: - -Table Configuration -=================== - -Table arguments other than the name, metadata, and mapped Column -arguments are specified using the ``__table_args__`` class attribute. -This attribute accommodates both positional as well as keyword -arguments that are normally sent to the -:class:`~sqlalchemy.schema.Table` constructor. -The attribute can be specified in one of two forms. One is as a -dictionary:: - - class MyClass(Base): - __tablename__ = 'sometable' - __table_args__ = {'mysql_engine':'InnoDB'} - -The other, a tuple, where each argument is positional -(usually constraints):: - - class MyClass(Base): - __tablename__ = 'sometable' - __table_args__ = ( - ForeignKeyConstraint(['id'], ['remote_table.id']), - UniqueConstraint('foo'), - ) - -Keyword arguments can be specified with the above form by -specifying the last argument as a dictionary:: - - class MyClass(Base): - __tablename__ = 'sometable' - __table_args__ = ( - ForeignKeyConstraint(['id'], ['remote_table.id']), - UniqueConstraint('foo'), - {'autoload':True} - ) - -Using a Hybrid Approach with __table__ -======================================= - -As an alternative to ``__tablename__``, a direct -:class:`~sqlalchemy.schema.Table` construct may be used. 
The -:class:`~sqlalchemy.schema.Column` objects, which in this case require -their names, will be added to the mapping just like a regular mapping -to a table:: - - class MyClass(Base): - __table__ = Table('my_table', Base.metadata, - Column('id', Integer, primary_key=True), - Column('name', String(50)) - ) - -``__table__`` provides a more focused point of control for establishing -table metadata, while still getting most of the benefits of using declarative. -An application that uses reflection might want to load table metadata elsewhere -and pass it to declarative classes:: - - from sqlalchemy.ext.declarative import declarative_base - - Base = declarative_base() - Base.metadata.reflect(some_engine) - - class User(Base): - __table__ = metadata.tables['user'] - - class Address(Base): - __table__ = metadata.tables['address'] - -Some configuration schemes may find it more appropriate to use ``__table__``, -such as those which already take advantage of the data-driven nature of -:class:`.Table` to customize and/or automate schema definition. - -Note that when the ``__table__`` approach is used, the object is immediately -usable as a plain :class:`.Table` within the class declaration body itself, -as a Python class is only another syntactical block. Below this is illustrated -by using the ``id`` column in the ``primaryjoin`` condition of a -:func:`.relationship`:: - - class MyClass(Base): - __table__ = Table('my_table', Base.metadata, - Column('id', Integer, primary_key=True), - Column('name', String(50)) - ) - - widgets = relationship(Widget, - primaryjoin=Widget.myclass_id==__table__.c.id) - -Similarly, mapped attributes which refer to ``__table__`` can be placed inline, -as below where we assign the ``name`` column to the attribute ``_name``, -generating a synonym for ``name``:: - - from sqlalchemy.ext.declarative import synonym_for - - class MyClass(Base): - __table__ = Table('my_table', Base.metadata, - Column('id', Integer, primary_key=True), - Column('name', String(50)) - ) - - _name = __table__.c.name - - @synonym_for("_name") - def name(self): - return "Name: %s" % _name - -Using Reflection with Declarative -================================= - -It's easy to set up a :class:`.Table` that uses ``autoload=True`` -in conjunction with a mapped class:: - - class MyClass(Base): - __table__ = Table('mytable', Base.metadata, - autoload=True, autoload_with=some_engine) - -However, one improvement that can be made here is to not -require the :class:`.Engine` to be available when classes are -being first declared. To achieve this, use the -:class:`.DeferredReflection` mixin, which sets up mappings -only after a special ``prepare(engine)`` step is called:: - - from sqlalchemy.ext.declarative import declarative_base, DeferredReflection - - Base = declarative_base(cls=DeferredReflection) - - class Foo(Base): - __tablename__ = 'foo' - bars = relationship("Bar") - - class Bar(Base): - __tablename__ = 'bar' - - # illustrate overriding of "bar.foo_id" to have - # a foreign key constraint otherwise not - # reflected, such as when using MySQL - foo_id = Column(Integer, ForeignKey('foo.id')) - - Base.prepare(e) - -.. versionadded:: 0.8 - Added :class:`.DeferredReflection`. - -Mapper Configuration -==================== - -Declarative makes use of the :func:`~.orm.mapper` function internally -when it creates the mapping to the declared table. The options -for :func:`~.orm.mapper` are passed directly through via the -``__mapper_args__`` class attribute. 
As always, arguments which reference -locally mapped columns can reference them directly from within the -class declaration:: - - from datetime import datetime - - class Widget(Base): - __tablename__ = 'widgets' - - id = Column(Integer, primary_key=True) - timestamp = Column(DateTime, nullable=False) - - __mapper_args__ = { - 'version_id_col': timestamp, - 'version_id_generator': lambda v:datetime.now() - } - -.. _declarative_inheritance: - -Inheritance Configuration -========================= - -Declarative supports all three forms of inheritance as intuitively -as possible. The ``inherits`` mapper keyword argument is not needed -as declarative will determine this from the class itself. The various -"polymorphic" keyword arguments are specified using ``__mapper_args__``. - -Joined Table Inheritance -~~~~~~~~~~~~~~~~~~~~~~~~ - -Joined table inheritance is defined as a subclass that defines its own -table:: - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - __tablename__ = 'engineers' - __mapper_args__ = {'polymorphic_identity': 'engineer'} - id = Column(Integer, ForeignKey('people.id'), primary_key=True) - primary_language = Column(String(50)) - -Note that above, the ``Engineer.id`` attribute, since it shares the -same attribute name as the ``Person.id`` attribute, will in fact -represent the ``people.id`` and ``engineers.id`` columns together, -with the "Engineer.id" column taking precedence if queried directly. -To provide the ``Engineer`` class with an attribute that represents -only the ``engineers.id`` column, give it a different attribute name:: - - class Engineer(Person): - __tablename__ = 'engineers' - __mapper_args__ = {'polymorphic_identity': 'engineer'} - engineer_id = Column('id', Integer, ForeignKey('people.id'), - primary_key=True) - primary_language = Column(String(50)) - - -.. versionchanged:: 0.7 joined table inheritance favors the subclass - column over that of the superclass, such as querying above - for ``Engineer.id``. Prior to 0.7 this was the reverse. - -.. _declarative_single_table: - -Single Table Inheritance -~~~~~~~~~~~~~~~~~~~~~~~~ - -Single table inheritance is defined as a subclass that does not have -its own table; you just leave out the ``__table__`` and ``__tablename__`` -attributes:: - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - __mapper_args__ = {'polymorphic_identity': 'engineer'} - primary_language = Column(String(50)) - -When the above mappers are configured, the ``Person`` class is mapped -to the ``people`` table *before* the ``primary_language`` column is -defined, and this column will not be included in its own mapping. -When ``Engineer`` then defines the ``primary_language`` column, the -column is added to the ``people`` table so that it is included in the -mapping for ``Engineer`` and is also part of the table's full set of -columns. Columns which are not mapped to ``Person`` are also excluded -from any other single or joined inheriting classes using the -``exclude_properties`` mapper argument. 
Below, ``Manager`` will have -all the attributes of ``Person`` and ``Manager`` but *not* the -``primary_language`` attribute of ``Engineer``:: - - class Manager(Person): - __mapper_args__ = {'polymorphic_identity': 'manager'} - golf_swing = Column(String(50)) - -The attribute exclusion logic is provided by the -``exclude_properties`` mapper argument, and declarative's default -behavior can be disabled by passing an explicit ``exclude_properties`` -collection (empty or otherwise) to the ``__mapper_args__``. - -Resolving Column Conflicts -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Note above that the ``primary_language`` and ``golf_swing`` columns -are "moved up" to be applied to ``Person.__table__``, as a result of their -declaration on a subclass that has no table of its own. A tricky case -comes up when two subclasses want to specify *the same* column, as below:: - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - __mapper_args__ = {'polymorphic_identity': 'engineer'} - start_date = Column(DateTime) - - class Manager(Person): - __mapper_args__ = {'polymorphic_identity': 'manager'} - start_date = Column(DateTime) - -Above, the ``start_date`` column declared on both ``Engineer`` and ``Manager`` -will result in an error:: - - sqlalchemy.exc.ArgumentError: Column 'start_date' on class - conflicts with existing - column 'people.start_date' - -In a situation like this, Declarative can't be sure -of the intent, especially if the ``start_date`` columns had, for example, -different types. A situation like this can be resolved by using -:class:`.declared_attr` to define the :class:`.Column` conditionally, taking -care to return the **existing column** via the parent ``__table__`` if it -already exists:: - - from sqlalchemy.ext.declarative import declared_attr - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - __mapper_args__ = {'polymorphic_identity': 'engineer'} - - @declared_attr - def start_date(cls): - "Start date column, if not present already." - return Person.__table__.c.get('start_date', Column(DateTime)) - - class Manager(Person): - __mapper_args__ = {'polymorphic_identity': 'manager'} - - @declared_attr - def start_date(cls): - "Start date column, if not present already." - return Person.__table__.c.get('start_date', Column(DateTime)) - -Above, when ``Manager`` is mapped, the ``start_date`` column is -already present on the ``Person`` class. Declarative lets us return -that :class:`.Column` as a result in this case, where it knows to skip -re-assigning the same column. If the mapping is mis-configured such -that the ``start_date`` column is accidentally re-assigned to a -different table (such as, if we changed ``Manager`` to be joined -inheritance without fixing ``start_date``), an error is raised which -indicates an existing :class:`.Column` is trying to be re-assigned to -a different owning :class:`.Table`. - -.. versionadded:: 0.8 :class:`.declared_attr` can be used on a non-mixin - class, and the returned :class:`.Column` or other mapped attribute - will be applied to the mapping as any other attribute. Previously, - the resulting attribute would be ignored, and also result in a warning - being emitted when a subclass was created. - -.. 
versionadded:: 0.8 :class:`.declared_attr`, when used either with a - mixin or non-mixin declarative class, can return an existing - :class:`.Column` already assigned to the parent :class:`.Table`, - to indicate that the re-assignment of the :class:`.Column` should be - skipped, however should still be mapped on the target class, - in order to resolve duplicate column conflicts. - -The same concept can be used with mixin classes (see -:ref:`declarative_mixins`):: - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class HasStartDate(object): - @declared_attr - def start_date(cls): - return cls.__table__.c.get('start_date', Column(DateTime)) - - class Engineer(HasStartDate, Person): - __mapper_args__ = {'polymorphic_identity': 'engineer'} - - class Manager(HasStartDate, Person): - __mapper_args__ = {'polymorphic_identity': 'manager'} - -The above mixin checks the local ``__table__`` attribute for the column. -Because we're using single table inheritance, we're sure that in this case, -``cls.__table__`` refers to ``People.__table__``. If we were mixing joined- -and single-table inheritance, we might want our mixin to check more carefully -if ``cls.__table__`` is really the :class:`.Table` we're looking for. - -Concrete Table Inheritance -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Concrete is defined as a subclass which has its own table and sets the -``concrete`` keyword argument to ``True``:: - - class Person(Base): - __tablename__ = 'people' - id = Column(Integer, primary_key=True) - name = Column(String(50)) - - class Engineer(Person): - __tablename__ = 'engineers' - __mapper_args__ = {'concrete':True} - id = Column(Integer, primary_key=True) - primary_language = Column(String(50)) - name = Column(String(50)) - -Usage of an abstract base class is a little less straightforward as it -requires usage of :func:`~sqlalchemy.orm.util.polymorphic_union`, -which needs to be created with the :class:`.Table` objects -before the class is built:: - - engineers = Table('engineers', Base.metadata, - Column('id', Integer, primary_key=True), - Column('name', String(50)), - Column('primary_language', String(50)) - ) - managers = Table('managers', Base.metadata, - Column('id', Integer, primary_key=True), - Column('name', String(50)), - Column('golf_swing', String(50)) - ) - - punion = polymorphic_union({ - 'engineer':engineers, - 'manager':managers - }, 'type', 'punion') - - class Person(Base): - __table__ = punion - __mapper_args__ = {'polymorphic_on':punion.c.type} - - class Engineer(Person): - __table__ = engineers - __mapper_args__ = {'polymorphic_identity':'engineer', 'concrete':True} - - class Manager(Person): - __table__ = managers - __mapper_args__ = {'polymorphic_identity':'manager', 'concrete':True} - -.. _declarative_concrete_helpers: - -Using the Concrete Helpers -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Helper classes provides a simpler pattern for concrete inheritance. -With these objects, the ``__declare_first__`` helper is used to configure the -"polymorphic" loader for the mapper after all subclasses have been declared. - -.. 
versionadded:: 0.7.3 - -An abstract base can be declared using the -:class:`.AbstractConcreteBase` class:: - - from sqlalchemy.ext.declarative import AbstractConcreteBase - - class Employee(AbstractConcreteBase, Base): - pass - -To have a concrete ``employee`` table, use :class:`.ConcreteBase` instead:: - - from sqlalchemy.ext.declarative import ConcreteBase - - class Employee(ConcreteBase, Base): - __tablename__ = 'employee' - employee_id = Column(Integer, primary_key=True) - name = Column(String(50)) - __mapper_args__ = { - 'polymorphic_identity':'employee', - 'concrete':True} - - -Either ``Employee`` base can be used in the normal fashion:: - - class Manager(Employee): - __tablename__ = 'manager' - employee_id = Column(Integer, primary_key=True) - name = Column(String(50)) - manager_data = Column(String(40)) - __mapper_args__ = { - 'polymorphic_identity':'manager', - 'concrete':True} - - class Engineer(Employee): - __tablename__ = 'engineer' - employee_id = Column(Integer, primary_key=True) - name = Column(String(50)) - engineer_info = Column(String(40)) - __mapper_args__ = {'polymorphic_identity':'engineer', - 'concrete':True} - - -The :class:`.AbstractConcreteBase` class is itself mapped, and can be -used as a target of relationships:: - - class Company(Base): - __tablename__ = 'company' - - id = Column(Integer, primary_key=True) - employees = relationship("Employee", - primaryjoin="Company.id == Employee.company_id") - - -.. versionchanged:: 0.9.3 Support for use of :class:`.AbstractConcreteBase` - as the target of a :func:`.relationship` has been improved. - -It can also be queried directly:: - - for employee in session.query(Employee).filter(Employee.name == 'qbert'): - print(employee) - - -.. _declarative_mixins: - -Mixin and Custom Base Classes -============================== - -A common need when using :mod:`~sqlalchemy.ext.declarative` is to -share some functionality, such as a set of common columns, some common -table options, or other mapped properties, across many -classes. The standard Python idioms for this is to have the classes -inherit from a base which includes these common features. - -When using :mod:`~sqlalchemy.ext.declarative`, this idiom is allowed -via the usage of a custom declarative base class, as well as a "mixin" class -which is inherited from in addition to the primary base. Declarative -includes several helper features to make this work in terms of how -mappings are declared. An example of some commonly mixed-in -idioms is below:: - - from sqlalchemy.ext.declarative import declared_attr - - class MyMixin(object): - - @declared_attr - def __tablename__(cls): - return cls.__name__.lower() - - __table_args__ = {'mysql_engine': 'InnoDB'} - __mapper_args__= {'always_refresh': True} - - id = Column(Integer, primary_key=True) - - class MyModel(MyMixin, Base): - name = Column(String(1000)) - -Where above, the class ``MyModel`` will contain an "id" column -as the primary key, a ``__tablename__`` attribute that derives -from the name of the class itself, as well as ``__table_args__`` -and ``__mapper_args__`` defined by the ``MyMixin`` mixin class. - -There's no fixed convention over whether ``MyMixin`` precedes -``Base`` or not. Normal Python method resolution rules apply, and -the above example would work just as well with:: - - class MyModel(Base, MyMixin): - name = Column(String(1000)) - -This works because ``Base`` here doesn't define any of the -variables that ``MyMixin`` defines, i.e. ``__tablename__``, -``__table_args__``, ``id``, etc. 
If the ``Base`` did define -an attribute of the same name, the class placed first in the -inherits list would determine which attribute is used on the -newly defined class. - -Augmenting the Base -~~~~~~~~~~~~~~~~~~~ - -In addition to using a pure mixin, most of the techniques in this -section can also be applied to the base class itself, for patterns that -should apply to all classes derived from a particular base. This is achieved -using the ``cls`` argument of the :func:`.declarative_base` function:: - - from sqlalchemy.ext.declarative import declared_attr - - class Base(object): - @declared_attr - def __tablename__(cls): - return cls.__name__.lower() - - __table_args__ = {'mysql_engine': 'InnoDB'} - - id = Column(Integer, primary_key=True) - - from sqlalchemy.ext.declarative import declarative_base - - Base = declarative_base(cls=Base) - - class MyModel(Base): - name = Column(String(1000)) - -Where above, ``MyModel`` and all other classes that derive from ``Base`` will -have a table name derived from the class name, an ``id`` primary key column, -as well as the "InnoDB" engine for MySQL. - -Mixing in Columns -~~~~~~~~~~~~~~~~~ - -The most basic way to specify a column on a mixin is by simple -declaration:: - - class TimestampMixin(object): - created_at = Column(DateTime, default=func.now()) - - class MyModel(TimestampMixin, Base): - __tablename__ = 'test' - - id = Column(Integer, primary_key=True) - name = Column(String(1000)) - -Where above, all declarative classes that include ``TimestampMixin`` -will also have a column ``created_at`` that applies a timestamp to -all row insertions. - -Those familiar with the SQLAlchemy expression language know that -the object identity of clause elements defines their role in a schema. -Two ``Table`` objects ``a`` and ``b`` may both have a column called -``id``, but the way these are differentiated is that ``a.c.id`` -and ``b.c.id`` are two distinct Python objects, referencing their -parent tables ``a`` and ``b`` respectively. - -In the case of the mixin column, it seems that only one -:class:`.Column` object is explicitly created, yet the ultimate -``created_at`` column above must exist as a distinct Python object -for each separate destination class. To accomplish this, the declarative -extension creates a **copy** of each :class:`.Column` object encountered on -a class that is detected as a mixin. - -This copy mechanism is limited to simple columns that have no foreign -keys, as a :class:`.ForeignKey` itself contains references to columns -which can't be properly recreated at this level. For columns that -have foreign keys, as well as for the variety of mapper-level constructs -that require destination-explicit context, the -:class:`~.declared_attr` decorator is provided so that -patterns common to many classes can be defined as callables:: - - from sqlalchemy.ext.declarative import declared_attr - - class ReferenceAddressMixin(object): - @declared_attr - def address_id(cls): - return Column(Integer, ForeignKey('address.id')) - - class User(ReferenceAddressMixin, Base): - __tablename__ = 'user' - id = Column(Integer, primary_key=True) - -Where above, the ``address_id`` class-level callable is executed at the -point at which the ``User`` class is constructed, and the declarative -extension can use the resulting :class:`.Column` object as returned by -the method without the need to copy it. - -.. versionchanged:: > 0.6.5 - Rename 0.6.5 ``sqlalchemy.util.classproperty`` - into :class:`~.declared_attr`. 
- -Columns generated by :class:`~.declared_attr` can also be -referenced by ``__mapper_args__`` to a limited degree, currently -by ``polymorphic_on`` and ``version_id_col``, by specifying the -classdecorator itself into the dictionary - the declarative extension -will resolve them at class construction time:: - - class MyMixin: - @declared_attr - def type_(cls): - return Column(String(50)) - - __mapper_args__= {'polymorphic_on':type_} - - class MyModel(MyMixin, Base): - __tablename__='test' - id = Column(Integer, primary_key=True) - - - -Mixing in Relationships -~~~~~~~~~~~~~~~~~~~~~~~ - -Relationships created by :func:`~sqlalchemy.orm.relationship` are provided -with declarative mixin classes exclusively using the -:class:`.declared_attr` approach, eliminating any ambiguity -which could arise when copying a relationship and its possibly column-bound -contents. Below is an example which combines a foreign key column and a -relationship so that two classes ``Foo`` and ``Bar`` can both be configured to -reference a common target class via many-to-one:: - - class RefTargetMixin(object): - @declared_attr - def target_id(cls): - return Column('target_id', ForeignKey('target.id')) - - @declared_attr - def target(cls): - return relationship("Target") - - class Foo(RefTargetMixin, Base): - __tablename__ = 'foo' - id = Column(Integer, primary_key=True) - - class Bar(RefTargetMixin, Base): - __tablename__ = 'bar' - id = Column(Integer, primary_key=True) - - class Target(Base): - __tablename__ = 'target' - id = Column(Integer, primary_key=True) - -Using Advanced Relationship Arguments (e.g. ``primaryjoin``, etc.) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -:func:`~sqlalchemy.orm.relationship` definitions which require explicit -primaryjoin, order_by etc. expressions should in all but the most -simplistic cases use **late bound** forms -for these arguments, meaning, using either the string form or a lambda. -The reason for this is that the related :class:`.Column` objects which are to -be configured using ``@declared_attr`` are not available to another -``@declared_attr`` attribute; while the methods will work and return new -:class:`.Column` objects, those are not the :class:`.Column` objects that -Declarative will be using as it calls the methods on its own, thus using -*different* :class:`.Column` objects. - -The canonical example is the primaryjoin condition that depends upon -another mixed-in column:: - - class RefTargetMixin(object): - @declared_attr - def target_id(cls): - return Column('target_id', ForeignKey('target.id')) - - @declared_attr - def target(cls): - return relationship(Target, - primaryjoin=Target.id==cls.target_id # this is *incorrect* - ) - -Mapping a class using the above mixin, we will get an error like:: - - sqlalchemy.exc.InvalidRequestError: this ForeignKey's parent column is not - yet associated with a Table. - -This is because the ``target_id`` :class:`.Column` we've called upon in our ``target()`` -method is not the same :class:`.Column` that declarative is actually going to map -to our table. 
- -The condition above is resolved using a lambda:: - - class RefTargetMixin(object): - @declared_attr - def target_id(cls): - return Column('target_id', ForeignKey('target.id')) - - @declared_attr - def target(cls): - return relationship(Target, - primaryjoin=lambda: Target.id==cls.target_id - ) - -or alternatively, the string form (which ultmately generates a lambda):: - - class RefTargetMixin(object): - @declared_attr - def target_id(cls): - return Column('target_id', ForeignKey('target.id')) - - @declared_attr - def target(cls): - return relationship("Target", - primaryjoin="Target.id==%s.target_id" % cls.__name__ - ) - -Mixing in deferred(), column_property(), and other MapperProperty classes -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Like :func:`~sqlalchemy.orm.relationship`, all -:class:`~sqlalchemy.orm.interfaces.MapperProperty` subclasses such as -:func:`~sqlalchemy.orm.deferred`, :func:`~sqlalchemy.orm.column_property`, -etc. ultimately involve references to columns, and therefore, when -used with declarative mixins, have the :class:`.declared_attr` -requirement so that no reliance on copying is needed:: - - class SomethingMixin(object): - - @declared_attr - def dprop(cls): - return deferred(Column(Integer)) - - class Something(SomethingMixin, Base): - __tablename__ = "something" - -Mixing in Association Proxy and Other Attributes -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Mixins can specify user-defined attributes as well as other extension -units such as :func:`.association_proxy`. The usage of -:class:`.declared_attr` is required in those cases where the attribute must -be tailored specifically to the target subclass. An example is when -constructing multiple :func:`.association_proxy` attributes which each -target a different type of child object. Below is an -:func:`.association_proxy` / mixin example which provides a scalar list of -string values to an implementing class:: - - from sqlalchemy import Column, Integer, ForeignKey, String - from sqlalchemy.orm import relationship - from sqlalchemy.ext.associationproxy import association_proxy - from sqlalchemy.ext.declarative import declarative_base, declared_attr - - Base = declarative_base() - - class HasStringCollection(object): - @declared_attr - def _strings(cls): - class StringAttribute(Base): - __tablename__ = cls.string_table_name - id = Column(Integer, primary_key=True) - value = Column(String(50), nullable=False) - parent_id = Column(Integer, - ForeignKey('%s.id' % cls.__tablename__), - nullable=False) - def __init__(self, value): - self.value = value - - return relationship(StringAttribute) - - @declared_attr - def strings(cls): - return association_proxy('_strings', 'value') - - class TypeA(HasStringCollection, Base): - __tablename__ = 'type_a' - string_table_name = 'type_a_strings' - id = Column(Integer(), primary_key=True) - - class TypeB(HasStringCollection, Base): - __tablename__ = 'type_b' - string_table_name = 'type_b_strings' - id = Column(Integer(), primary_key=True) - -Above, the ``HasStringCollection`` mixin produces a :func:`.relationship` -which refers to a newly generated class called ``StringAttribute``. The -``StringAttribute`` class is generated with it's own :class:`.Table` -definition which is local to the parent class making usage of the -``HasStringCollection`` mixin. It also produces an :func:`.association_proxy` -object which proxies references to the ``strings`` attribute onto the ``value`` -attribute of each ``StringAttribute`` instance. 
- -``TypeA`` or ``TypeB`` can be instantiated given the constructor -argument ``strings``, a list of strings:: - - ta = TypeA(strings=['foo', 'bar']) - tb = TypeA(strings=['bat', 'bar']) - -This list will generate a collection -of ``StringAttribute`` objects, which are persisted into a table that's -local to either the ``type_a_strings`` or ``type_b_strings`` table:: - - >>> print ta._strings - [<__main__.StringAttribute object at 0x10151cd90>, - <__main__.StringAttribute object at 0x10151ce10>] - -When constructing the :func:`.association_proxy`, the -:class:`.declared_attr` decorator must be used so that a distinct -:func:`.association_proxy` object is created for each of the ``TypeA`` -and ``TypeB`` classes. - -.. versionadded:: 0.8 :class:`.declared_attr` is usable with non-mapped - attributes, including user-defined attributes as well as - :func:`.association_proxy`. - - -Controlling table inheritance with mixins -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The ``__tablename__`` attribute in conjunction with the hierarchy of -classes involved in a declarative mixin scenario controls what type of -table inheritance, if any, -is configured by the declarative extension. - -If the ``__tablename__`` is computed by a mixin, you may need to -control which classes get the computed attribute in order to get the -type of table inheritance you require. - -For example, if you had a mixin that computes ``__tablename__`` but -where you wanted to use that mixin in a single table inheritance -hierarchy, you can explicitly specify ``__tablename__`` as ``None`` to -indicate that the class should not have a table mapped:: - - from sqlalchemy.ext.declarative import declared_attr - - class Tablename: - @declared_attr - def __tablename__(cls): - return cls.__name__.lower() - - class Person(Tablename, Base): - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - __tablename__ = None - __mapper_args__ = {'polymorphic_identity': 'engineer'} - primary_language = Column(String(50)) - -Alternatively, you can make the mixin intelligent enough to only -return a ``__tablename__`` in the event that no table is already -mapped in the inheritance hierarchy. To help with this, a -:func:`~sqlalchemy.ext.declarative.has_inherited_table` helper -function is provided that returns ``True`` if a parent class already -has a mapped table. - -As an example, here's a mixin that will only allow single table -inheritance:: - - from sqlalchemy.ext.declarative import declared_attr - from sqlalchemy.ext.declarative import has_inherited_table - - class Tablename(object): - @declared_attr - def __tablename__(cls): - if has_inherited_table(cls): - return None - return cls.__name__.lower() - - class Person(Tablename, Base): - id = Column(Integer, primary_key=True) - discriminator = Column('type', String(50)) - __mapper_args__ = {'polymorphic_on': discriminator} - - class Engineer(Person): - primary_language = Column(String(50)) - __mapper_args__ = {'polymorphic_identity': 'engineer'} - - -Combining Table/Mapper Arguments from Multiple Mixins -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In the case of ``__table_args__`` or ``__mapper_args__`` -specified with declarative mixins, you may want to combine -some parameters from several mixins with those you wish to -define on the class iteself. 
The -:class:`.declared_attr` decorator can be used -here to create user-defined collation routines that pull -from multiple collections:: - - from sqlalchemy.ext.declarative import declared_attr - - class MySQLSettings(object): - __table_args__ = {'mysql_engine':'InnoDB'} - - class MyOtherMixin(object): - __table_args__ = {'info':'foo'} - - class MyModel(MySQLSettings, MyOtherMixin, Base): - __tablename__='my_model' - - @declared_attr - def __table_args__(cls): - args = dict() - args.update(MySQLSettings.__table_args__) - args.update(MyOtherMixin.__table_args__) - return args - - id = Column(Integer, primary_key=True) - -Creating Indexes with Mixins -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To define a named, potentially multicolumn :class:`.Index` that applies to all -tables derived from a mixin, use the "inline" form of :class:`.Index` and -establish it as part of ``__table_args__``:: - - class MyMixin(object): - a = Column(Integer) - b = Column(Integer) - - @declared_attr - def __table_args__(cls): - return (Index('test_idx_%s' % cls.__tablename__, 'a', 'b'),) - - class MyModel(MyMixin, Base): - __tablename__ = 'atable' - c = Column(Integer,primary_key=True) - -Special Directives -================== - -``__declare_last__()`` -~~~~~~~~~~~~~~~~~~~~~~ - -The ``__declare_last__()`` hook allows definition of -a class level function that is automatically called by the -:meth:`.MapperEvents.after_configured` event, which occurs after mappings are -assumed to be completed and the 'configure' step has finished:: - - class MyClass(Base): - @classmethod - def __declare_last__(cls): - "" - # do something with mappings - -.. versionadded:: 0.7.3 - -``__declare_first__()`` -~~~~~~~~~~~~~~~~~~~~~~~ - -Like ``__declare_last__()``, but is called at the beginning of mapper configuration -via the :meth:`.MapperEvents.before_configured` event:: - - class MyClass(Base): - @classmethod - def __declare_first__(cls): - "" - # do something before mappings are configured - -.. versionadded:: 0.9.3 - -.. _declarative_abstract: - -``__abstract__`` -~~~~~~~~~~~~~~~~~~~ - -``__abstract__`` causes declarative to skip the production -of a table or mapper for the class entirely. A class can be added within a -hierarchy in the same way as mixin (see :ref:`declarative_mixins`), allowing -subclasses to extend just from the special class:: - - class SomeAbstractBase(Base): - __abstract__ = True - - def some_helpful_method(self): - "" - - @declared_attr - def __mapper_args__(cls): - return {"helpful mapper arguments":True} - - class MyMappedClass(SomeAbstractBase): - "" - -One possible use of ``__abstract__`` is to use a distinct -:class:`.MetaData` for different bases:: - - Base = declarative_base() - - class DefaultBase(Base): - __abstract__ = True - metadata = MetaData() - - class OtherBase(Base): - __abstract__ = True - metadata = MetaData() - -Above, classes which inherit from ``DefaultBase`` will use one -:class:`.MetaData` as the registry of tables, and those which inherit from -``OtherBase`` will use a different one. The tables themselves can then be -created perhaps within distinct databases:: - - DefaultBase.metadata.create_all(some_engine) - OtherBase.metadata_create_all(some_other_engine) - -.. 
versionadded:: 0.7.3 - -Class Constructor -================= - -As a convenience feature, the :func:`declarative_base` sets a default -constructor on classes which takes keyword arguments, and assigns them -to the named attributes:: - - e = Engineer(primary_language='python') - -Sessions -======== - -Note that ``declarative`` does nothing special with sessions, and is -only intended as an easier way to configure mappers and -:class:`~sqlalchemy.schema.Table` objects. A typical application -setup using :class:`~sqlalchemy.orm.scoping.scoped_session` might look like:: - - engine = create_engine('postgresql://scott:tiger@localhost/test') - Session = scoped_session(sessionmaker(autocommit=False, - autoflush=False, - bind=engine)) - Base = declarative_base() - -Mapped instances then make usage of -:class:`~sqlalchemy.orm.session.Session` in the usual way. - -""" - from .api import declarative_base, synonym_for, comparable_using, \ instrument_declarative, ConcreteBase, AbstractConcreteBase, \ DeclarativeMeta, DeferredReflection, has_inherited_table,\ @@ -1311,6 +12,7 @@ class OtherBase(Base): __all__ = ['declarative_base', 'synonym_for', 'has_inherited_table', - 'comparable_using', 'instrument_declarative', 'declared_attr', - 'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta', - 'DeferredReflection'] + 'comparable_using', 'instrument_declarative', 'declared_attr', + 'as_declarative', + 'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta', + 'DeferredReflection'] diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py index 941f02b009..54e78ee1a8 100644 --- a/lib/sqlalchemy/ext/declarative/api.py +++ b/lib/sqlalchemy/ext/declarative/api.py @@ -1,26 +1,28 @@ # ext/declarative/api.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Public API functions and helpers for declarative.""" -from ...schema import Table, MetaData -from ...orm import synonym as _orm_synonym, mapper,\ - comparable_property,\ - interfaces, properties +from ...schema import Table, MetaData, Column +from ...orm import synonym as _orm_synonym, \ + comparable_property,\ + interfaces, properties, attributes from ...orm.util import polymorphic_union from ...orm.base import _mapper_or_none -from ...util import OrderedDict +from ...util import OrderedDict, hybridmethod, hybridproperty +from ... import util from ... import exc import weakref from .base import _as_declarative, \ - _declarative_constructor,\ - _DeferredMapperConfig, _add_attribute + _declarative_constructor,\ + _DeferredMapperConfig, _add_attribute from .clsregistry import _class_resolver -from . 
import clsregistry + def instrument_declarative(cls, registry, metadata): """Given a class, configure the class declaratively, @@ -30,8 +32,8 @@ def instrument_declarative(cls, registry, metadata): """ if '_decl_class_registry' in cls.__dict__: raise exc.InvalidRequestError( - "Class %r already has been " - "instrumented declaratively" % cls) + "Class %r already has been " + "instrumented declaratively" % cls) cls._decl_class_registry = registry cls.metadata = metadata _as_declarative(cls, cls.__name__, cls.__dict__) @@ -155,12 +157,90 @@ def __mapper_args__(cls): """ - def __init__(self, fget, *arg, **kw): - super(declared_attr, self).__init__(fget, *arg, **kw) + def __init__(self, fget, cascading=False): + super(declared_attr, self).__init__(fget) self.__doc__ = fget.__doc__ + self._cascading = cascading def __get__(desc, self, cls): - return desc.fget(cls) + reg = cls.__dict__.get('_sa_declared_attr_reg', None) + if reg is None: + manager = attributes.manager_of_class(cls) + if manager is None: + util.warn( + "Unmanaged access of declarative attribute %s from " + "non-mapped class %s" % + (desc.fget.__name__, cls.__name__)) + return desc.fget(cls) + + if reg is None: + return desc.fget(cls) + elif desc in reg: + return reg[desc] + else: + reg[desc] = obj = desc.fget(cls) + return obj + + @hybridmethod + def _stateful(cls, **kw): + return _stateful_declared_attr(**kw) + + @hybridproperty + def cascading(cls): + """Mark a :class:`.declared_attr` as cascading. + + This is a special-use modifier which indicates that a column + or MapperProperty-based declared attribute should be configured + distinctly per mapped subclass, within a mapped-inheritance scenario. + + Below, both MyClass as well as MySubClass will have a distinct + ``id`` Column object established:: + + class HasSomeAttribute(object): + @declared_attr.cascading + def some_id(cls): + if has_inherited_table(cls): + return Column( + ForeignKey('myclass.id'), primary_key=True) + else: + return Column(Integer, primary_key=True) + + return Column('id', Integer, primary_key=True) + + class MyClass(HasSomeAttribute, Base): + "" + # ... + + class MySubClass(MyClass): + "" + # ... + + The behavior of the above configuration is that ``MySubClass`` + will refer to both its own ``id`` column as well as that of + ``MyClass`` underneath the attribute named ``some_id``. + + .. seealso:: + + :ref:`declarative_inheritance` + + :ref:`mixin_inheritance_columns` + + + """ + return cls._stateful(cascading=True) + + +class _stateful_declared_attr(declared_attr): + def __init__(self, **kw): + self.kw = kw + + def _stateful(self, **kw): + new_kw = self.kw.copy() + new_kw.update(kw) + return _stateful_declared_attr(**new_kw) + + def __call__(self, fn): + return declared_attr(fn, **self.kw) def declarative_base(bind=None, metadata=None, mapper=None, cls=object, @@ -244,6 +324,7 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object, return metaclass(name, bases, class_dict) + def as_declarative(**kw): """ Class decorator for :func:`.declarative_base`. @@ -281,6 +362,7 @@ def decorate(cls): return decorate + class ConcreteBase(object): """A helper class for 'concrete' declarative mappings. @@ -315,6 +397,15 @@ class Manager(Employee): 'polymorphic_identity':'manager', 'concrete':True} + .. 
seealso:: + + :class:`.AbstractConcreteBase` + + :ref:`concrete_inheritance` + + :ref:`inheritance_concrete_helpers` + + """ @classmethod @@ -322,7 +413,7 @@ def _create_polymorphic_union(cls, mappers): return polymorphic_union(OrderedDict( (mp.polymorphic_identity, mp.local_table) for mp in mappers - ), 'type', 'pjoin') + ), 'type', 'pjoin') @classmethod def __declare_first__(cls): @@ -345,9 +436,11 @@ class AbstractConcreteBase(ConcreteBase): ``__declare_last__()`` function, which is essentially a hook for the :meth:`.after_configured` event. - :class:`.AbstractConcreteBase` does not produce a mapped - table for the class itself. Compare to :class:`.ConcreteBase`, - which does. + :class:`.AbstractConcreteBase` does produce a mapped class + for the base class, however it is not persisted to any table; it + is instead mapped directly to the "polymorphic" selectable directly + and is only used for selecting. Compare to :class:`.ConcreteBase`, + which does create a persisted table for the base class. Example:: @@ -361,20 +454,79 @@ class Manager(Employee): employee_id = Column(Integer, primary_key=True) name = Column(String(50)) manager_data = Column(String(40)) + __mapper_args__ = { - 'polymorphic_identity':'manager', - 'concrete':True} + 'polymorphic_identity':'manager', + 'concrete':True} + + The abstract base class is handled by declarative in a special way; + at class configuration time, it behaves like a declarative mixin + or an ``__abstract__`` base class. Once classes are configured + and mappings are produced, it then gets mapped itself, but + after all of its decscendants. This is a very unique system of mapping + not found in any other SQLAlchemy system. + + Using this approach, we can specify columns and properties + that will take place on mapped subclasses, in the way that + we normally do as in :ref:`declarative_mixins`:: + + class Company(Base): + __tablename__ = 'company' + id = Column(Integer, primary_key=True) + + class Employee(AbstractConcreteBase, Base): + employee_id = Column(Integer, primary_key=True) + + @declared_attr + def company_id(cls): + return Column(ForeignKey('company.id')) + + @declared_attr + def company(cls): + return relationship("Company") + + class Manager(Employee): + __tablename__ = 'manager' + + name = Column(String(50)) + manager_data = Column(String(40)) + + __mapper_args__ = { + 'polymorphic_identity':'manager', + 'concrete':True} + + When we make use of our mappings however, both ``Manager`` and + ``Employee`` will have an independently usable ``.company`` attribute:: + + session.query(Employee).filter(Employee.company.has(id=5)) + + .. versionchanged:: 1.0.0 - The mechanics of :class:`.AbstractConcreteBase` + have been reworked to support relationships established directly + on the abstract base, without any special configurational steps. + + .. 
seealso:: + + :class:`.ConcreteBase` + + :ref:`concrete_inheritance` + + :ref:`inheritance_concrete_helpers` """ - __abstract__ = True + __no_table__ = True @classmethod def __declare_first__(cls): - if hasattr(cls, '__mapper__'): + cls._sa_decl_prepare_nocascade() + + @classmethod + def _sa_decl_prepare_nocascade(cls): + if getattr(cls, '__mapper__', None): return - clsregistry.add_class(cls.__name__, cls) + to_map = _DeferredMapperConfig.config_for_cls(cls) + # can't rely on 'self_and_descendants' here # since technically an immediate subclass # might not be mapped, but a subclass @@ -388,11 +540,33 @@ def __declare_first__(cls): if mn is not None: mappers.append(mn) pjoin = cls._create_polymorphic_union(mappers) - cls.__mapper__ = m = mapper(cls, pjoin, polymorphic_on=pjoin.c.type) + + # For columns that were declared on the class, these + # are normally ignored with the "__no_table__" mapping, + # unless they have a different attribute key vs. col name + # and are in the properties argument. + # In that case, ensure we update the properties entry + # to the correct column from the pjoin target table. + declared_cols = set(to_map.declared_columns) + for k, v in list(to_map.properties.items()): + if v in declared_cols: + to_map.properties[k] = pjoin.c[v.key] + + to_map.local_table = pjoin + + m_args = to_map.mapper_args_fn or dict + + def mapper_args(): + args = m_args() + args['polymorphic_on'] = pjoin.c.type + return args + to_map.mapper_args_fn = mapper_args + + m = to_map.map() for scls in cls.__subclasses__(): sm = _mapper_or_none(scls) - if sm.concrete and cls in scls.__bases__: + if sm and sm.concrete and cls in scls.__bases__: sm._set_concrete_base(m) @@ -477,7 +651,7 @@ def prepare(cls, engine): metadata = mapper.class_.metadata for rel in mapper._props.values(): if isinstance(rel, properties.RelationshipProperty) and \ - rel.secondary is not None: + rel.secondary is not None: if isinstance(rel.secondary, Table): cls._reflect_table(rel.secondary, engine) elif isinstance(rel.secondary, _class_resolver): @@ -505,9 +679,9 @@ def _sa_decl_prepare(cls, local_table, engine): @classmethod def _reflect_table(cls, table, engine): Table(table.name, - table.metadata, - extend_existing=True, - autoload_replace=False, - autoload=True, - autoload_with=engine, - schema=table.schema) + table.metadata, + extend_existing=True, + autoload_replace=False, + autoload=True, + autoload_with=engine, + schema=table.schema) diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py index eb66f12b6a..3404ce9f47 100644 --- a/lib/sqlalchemy/ext/declarative/base.py +++ b/lib/sqlalchemy/ext/declarative/base.py @@ -1,5 +1,6 @@ # ext/declarative/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -18,6 +19,10 @@ from . 
import clsregistry import collections import weakref +from sqlalchemy.orm import instrumentation + +declared_attr = declarative_props = None + def _declared_mapping_info(cls): # deferred mapping @@ -30,322 +35,432 @@ def _declared_mapping_info(cls): return None -def _as_declarative(cls, classname, dict_): - from .api import declared_attr +def _resolve_for_abstract(cls): + if cls is object: + return None + + if _get_immediate_cls_attr(cls, '__abstract__', strict=True): + for sup in cls.__bases__: + sup = _resolve_for_abstract(sup) + if sup is not None: + return sup + else: + return None + else: + return cls - # dict_ will be a dictproxy, which we can't write to, and we need to! - dict_ = dict(dict_) - column_copies = {} - potential_columns = {} +def _get_immediate_cls_attr(cls, attrname, strict=False): + """return an attribute of the class that is either present directly + on the class, e.g. not on a superclass, or is from a superclass but + this superclass is a mixin, that is, not a descendant of + the declarative base. - mapper_args_fn = None - table_args = inherited_table_args = None - tablename = None + This is used to detect attributes that indicate something about + a mapped class independently from any mapped classes that it may + inherit from. - declarative_props = (declared_attr, util.classproperty) + """ + if not issubclass(cls, object): + return None for base in cls.__mro__: _is_declarative_inherits = hasattr(base, '_decl_class_registry') + if attrname in base.__dict__ and ( + base is cls or + ((base in cls.__bases__ if strict else True) + and not _is_declarative_inherits) + ): + return getattr(base, attrname) + else: + return None - if '__declare_last__' in base.__dict__: - @event.listens_for(mapper, "after_configured") - def go(): - cls.__declare_last__() - if '__declare_first__' in base.__dict__: - @event.listens_for(mapper, "before_configured") - def go(): - cls.__declare_first__() - if '__abstract__' in base.__dict__: - if (base is cls or - (base in cls.__bases__ and not _is_declarative_inherits) - ): - return - class_mapped = _declared_mapping_info(base) is not None +def _as_declarative(cls, classname, dict_): + global declared_attr, declarative_props + if declared_attr is None: + from .api import declared_attr + declarative_props = (declared_attr, util.classproperty) - for name, obj in vars(base).items(): - if name == '__mapper_args__': - if not mapper_args_fn and ( - not class_mapped or - isinstance(obj, declarative_props) - ): - # don't even invoke __mapper_args__ until - # after we've determined everything about the - # mapped table. 
- mapper_args_fn = lambda: cls.__mapper_args__ - elif name == '__tablename__': - if not tablename and ( - not class_mapped or - isinstance(obj, declarative_props) - ): - tablename = cls.__tablename__ - elif name == '__table_args__': - if not table_args and ( - not class_mapped or - isinstance(obj, declarative_props) - ): - table_args = cls.__table_args__ - if not isinstance(table_args, (tuple, dict, type(None))): - raise exc.ArgumentError( + if _get_immediate_cls_attr(cls, '__abstract__', strict=True): + return + + _MapperConfig.setup_mapping(cls, classname, dict_) + + +class _MapperConfig(object): + + @classmethod + def setup_mapping(cls, cls_, classname, dict_): + defer_map = _get_immediate_cls_attr( + cls_, '_sa_decl_prepare_nocascade', strict=True) or \ + hasattr(cls_, '_sa_decl_prepare') + + if defer_map: + cfg_cls = _DeferredMapperConfig + else: + cfg_cls = _MapperConfig + cfg_cls(cls_, classname, dict_) + + def __init__(self, cls_, classname, dict_): + + self.cls = cls_ + + # dict_ will be a dictproxy, which we can't write to, and we need to! + self.dict_ = dict(dict_) + self.classname = classname + self.mapped_table = None + self.properties = util.OrderedDict() + self.declared_columns = set() + self.column_copies = {} + self._setup_declared_events() + + # temporary registry. While early 1.0 versions + # set up the ClassManager here, by API contract + # we can't do that until there's a mapper. + self.cls._sa_declared_attr_reg = {} + + self._scan_attributes() + + clsregistry.add_class(self.classname, self.cls) + + self._extract_mappable_attributes() + + self._extract_declared_columns() + + self._setup_table() + + self._setup_inheritance() + + self._early_mapping() + + def _early_mapping(self): + self.map() + + def _setup_declared_events(self): + if _get_immediate_cls_attr(self.cls, '__declare_last__'): + @event.listens_for(mapper, "after_configured") + def after_configured(): + self.cls.__declare_last__() + + if _get_immediate_cls_attr(self.cls, '__declare_first__'): + @event.listens_for(mapper, "before_configured") + def before_configured(): + self.cls.__declare_first__() + + def _scan_attributes(self): + cls = self.cls + dict_ = self.dict_ + column_copies = self.column_copies + mapper_args_fn = None + table_args = inherited_table_args = None + tablename = None + + for base in cls.__mro__: + class_mapped = base is not cls and \ + _declared_mapping_info(base) is not None and \ + not _get_immediate_cls_attr( + base, '_sa_decl_prepare_nocascade', strict=True) + + if not class_mapped and base is not cls: + self._produce_column_copies(base) + + for name, obj in vars(base).items(): + if name == '__mapper_args__': + if not mapper_args_fn and ( + not class_mapped or + isinstance(obj, declarative_props) + ): + # don't even invoke __mapper_args__ until + # after we've determined everything about the + # mapped table. + # make a copy of it so a class-level dictionary + # is not overwritten when we update column-based + # arguments. 
+ mapper_args_fn = lambda: dict(cls.__mapper_args__) + elif name == '__tablename__': + if not tablename and ( + not class_mapped or + isinstance(obj, declarative_props) + ): + tablename = cls.__tablename__ + elif name == '__table_args__': + if not table_args and ( + not class_mapped or + isinstance(obj, declarative_props) + ): + table_args = cls.__table_args__ + if not isinstance( + table_args, (tuple, dict, type(None))): + raise exc.ArgumentError( "__table_args__ value must be a tuple, " "dict, or None") - if base is not cls: - inherited_table_args = True - elif class_mapped: - if isinstance(obj, declarative_props): - util.warn("Regular (i.e. not __special__) " - "attribute '%s.%s' uses @declared_attr, " - "but owning class %s is mapped - " - "not applying to subclass %s." - % (base.__name__, name, base, cls)) - continue - elif base is not cls: - # we're a mixin. - if isinstance(obj, Column): - if getattr(cls, name) is not obj: - # if column has been overridden - # (like by the InstrumentedAttribute of the - # superclass), skip + if base is not cls: + inherited_table_args = True + elif class_mapped: + if isinstance(obj, declarative_props): + util.warn("Regular (i.e. not __special__) " + "attribute '%s.%s' uses @declared_attr, " + "but owning class %s is mapped - " + "not applying to subclass %s." + % (base.__name__, name, base, cls)) + continue + elif base is not cls: + # we're a mixin, abstract base, or something that is + # acting like that for now. + if isinstance(obj, Column): + # already copied columns to the mapped class. continue - if obj.foreign_keys: + elif isinstance(obj, MapperProperty): raise exc.InvalidRequestError( + "Mapper properties (i.e. deferred," + "column_property(), relationship(), etc.) must " + "be declared as @declared_attr callables " + "on declarative mixin classes.") + elif isinstance(obj, declarative_props): + oldclassprop = isinstance(obj, util.classproperty) + if not oldclassprop and obj._cascading: + dict_[name] = column_copies[obj] = \ + ret = obj.__get__(obj, cls) + setattr(cls, name, ret) + else: + if oldclassprop: + util.warn_deprecated( + "Use of sqlalchemy.util.classproperty on " + "declarative classes is deprecated.") + dict_[name] = column_copies[obj] = \ + ret = getattr(cls, name) + if isinstance(ret, (Column, MapperProperty)) and \ + ret.doc is None: + ret.doc = obj.__doc__ + + if inherited_table_args and not tablename: + table_args = None + + self.table_args = table_args + self.tablename = tablename + self.mapper_args_fn = mapper_args_fn + + def _produce_column_copies(self, base): + cls = self.cls + dict_ = self.dict_ + column_copies = self.column_copies + # copy mixin columns to the mapped class + for name, obj in vars(base).items(): + if isinstance(obj, Column): + if getattr(cls, name) is not obj: + # if column has been overridden + # (like by the InstrumentedAttribute of the + # superclass), skip + continue + elif obj.foreign_keys: + raise exc.InvalidRequestError( "Columns with foreign keys to other columns " "must be declared as @declared_attr callables " "on declarative mixin classes. ") - if name not in dict_ and not ( - '__table__' in dict_ and - (obj.name or name) in dict_['__table__'].c - ) and name not in potential_columns: - potential_columns[name] = \ - column_copies[obj] = \ - obj.copy() - column_copies[obj]._creation_order = \ - obj._creation_order - elif isinstance(obj, MapperProperty): - raise exc.InvalidRequestError( - "Mapper properties (i.e. deferred," - "column_property(), relationship(), etc.) 
must " - "be declared as @declared_attr callables " - "on declarative mixin classes.") - elif isinstance(obj, declarative_props): - dict_[name] = ret = \ - column_copies[obj] = getattr(cls, name) - if isinstance(ret, (Column, MapperProperty)) and \ - ret.doc is None: - ret.doc = obj.__doc__ - - # apply inherited columns as we should - for k, v in potential_columns.items(): - dict_[k] = v - - if inherited_table_args and not tablename: - table_args = None - - clsregistry.add_class(classname, cls) - our_stuff = util.OrderedDict() - - for k in list(dict_): - - # TODO: improve this ? all dunders ? - if k in ('__table__', '__tablename__', '__mapper_args__'): - continue - - value = dict_[k] - if isinstance(value, declarative_props): - value = getattr(cls, k) - - elif isinstance(value, QueryableAttribute) and \ - value.class_ is not cls and \ - value.key != k: - # detect a QueryableAttribute that's already mapped being - # assigned elsewhere in userland, turn into a synonym() - value = synonym(value.key) - setattr(cls, k, value) - - - if (isinstance(value, tuple) and len(value) == 1 and - isinstance(value[0], (Column, MapperProperty))): - util.warn("Ignoring declarative-like tuple value of attribute " - "%s: possibly a copy-and-paste error with a comma " - "left at the end of the line?" % k) - continue - if not isinstance(value, (Column, MapperProperty)): - if not k.startswith('__'): - dict_.pop(k) - setattr(cls, k, value) - continue - if k == 'metadata': - raise exc.InvalidRequestError( - "Attribute name 'metadata' is reserved " - "for the MetaData instance when using a " - "declarative base class." - ) - prop = clsregistry._deferred_relationship(cls, value) - our_stuff[k] = prop - - # set up attributes in the order they were created - our_stuff.sort(key=lambda key: our_stuff[key]._creation_order) - - # extract columns from the class dict - declared_columns = set() - name_to_prop_key = collections.defaultdict(set) - for key, c in list(our_stuff.items()): - if isinstance(c, (ColumnProperty, CompositeProperty)): - for col in c.columns: - if isinstance(col, Column) and \ - col.table is None: - _undefer_column_name(key, col) - if not isinstance(c, CompositeProperty): - name_to_prop_key[col.name].add(key) - declared_columns.add(col) - elif isinstance(c, Column): - _undefer_column_name(key, c) - name_to_prop_key[c.name].add(key) - declared_columns.add(c) - # if the column is the same name as the key, - # remove it from the explicit properties dict. - # the normal rules for assigning column-based properties - # will take over, including precedence of columns - # in multi-column ColumnProperties. 
- if key == c.key: - del our_stuff[key] - - for name, keys in name_to_prop_key.items(): - if len(keys) > 1: - util.warn( - "On class %r, Column object %r named directly multiple times, " - "only one will be used: %s" % - (classname, name, (", ".join(sorted(keys)))) - ) + elif name not in dict_ and not ( + '__table__' in dict_ and + (obj.name or name) in dict_['__table__'].c + ): + column_copies[obj] = copy_ = obj.copy() + copy_._creation_order = obj._creation_order + setattr(cls, name, copy_) + dict_[name] = copy_ - declared_columns = sorted( - declared_columns, key=lambda c: c._creation_order) - table = None + def _extract_mappable_attributes(self): + cls = self.cls + dict_ = self.dict_ - if hasattr(cls, '__table_cls__'): - table_cls = util.unbound_method_to_callable(cls.__table_cls__) - else: - table_cls = Table - - if '__table__' not in dict_: - if tablename is not None: - - args, table_kw = (), {} - if table_args: - if isinstance(table_args, dict): - table_kw = table_args - elif isinstance(table_args, tuple): - if isinstance(table_args[-1], dict): - args, table_kw = table_args[0:-1], table_args[-1] - else: - args = table_args - - autoload = dict_.get('__autoload__') - if autoload: - table_kw['autoload'] = True - - cls.__table__ = table = table_cls( - tablename, cls.metadata, - *(tuple(declared_columns) + tuple(args)), - **table_kw) - else: - table = cls.__table__ - if declared_columns: - for c in declared_columns: - if not table.c.contains_column(c): - raise exc.ArgumentError( - "Can't add additional column %r when " - "specifying __table__" % c.key - ) + our_stuff = self.properties - if hasattr(cls, '__mapper_cls__'): - mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__) - else: - mapper_cls = mapper + for k in list(dict_): - for c in cls.__bases__: - if _declared_mapping_info(c) is not None: - inherits = c - break - else: - inherits = None + if k in ('__table__', '__tablename__', '__mapper_args__'): + continue - if table is None and inherits is None: - raise exc.InvalidRequestError( - "Class %r does not have a __table__ or __tablename__ " - "specified and does not inherit from an existing " - "table-mapped class." % cls - ) - elif inherits: - inherited_mapper = _declared_mapping_info(inherits) - inherited_table = inherited_mapper.local_table - inherited_mapped_table = inherited_mapper.mapped_table - - if table is None: - # single table inheritance. - # ensure no table args - if table_args: - raise exc.ArgumentError( - "Can't place __table_args__ on an inherited class " - "with no table." - ) - # add any columns declared here to the inherited table. - for c in declared_columns: - if c.primary_key: - raise exc.ArgumentError( - "Can't place primary key columns on an inherited " - "class with no table." 
- ) - if c.name in inherited_table.c: - if inherited_table.c[c.name] is c: - continue - raise exc.ArgumentError( - "Column '%s' on class %s conflicts with " - "existing column '%s'" % - (c, cls, inherited_table.c[c.name]) - ) - inherited_table.append_column(c) - if inherited_mapped_table is not None and \ - inherited_mapped_table is not inherited_table: - inherited_mapped_table._refresh_for_new_column(c) - - defer_map = hasattr(cls, '_sa_decl_prepare') - if defer_map: - cfg_cls = _DeferredMapperConfig - else: - cfg_cls = _MapperConfig - mt = cfg_cls(mapper_cls, - cls, table, - inherits, - declared_columns, - column_copies, - our_stuff, - mapper_args_fn) - if not defer_map: - mt.map() + value = dict_[k] + if isinstance(value, declarative_props): + value = getattr(cls, k) + elif isinstance(value, QueryableAttribute) and \ + value.class_ is not cls and \ + value.key != k: + # detect a QueryableAttribute that's already mapped being + # assigned elsewhere in userland, turn into a synonym() + value = synonym(value.key) + setattr(cls, k, value) -class _MapperConfig(object): + if (isinstance(value, tuple) and len(value) == 1 and + isinstance(value[0], (Column, MapperProperty))): + util.warn("Ignoring declarative-like tuple value of attribute " + "%s: possibly a copy-and-paste error with a comma " + "left at the end of the line?" % k) + continue + elif not isinstance(value, (Column, MapperProperty)): + # using @declared_attr for some object that + # isn't Column/MapperProperty; remove from the dict_ + # and place the evaluated value onto the class. + if not k.startswith('__'): + dict_.pop(k) + setattr(cls, k, value) + continue + # we expect to see the name 'metadata' in some valid cases; + # however at this point we see it's assigned to something trying + # to be mapped, so raise for that. + elif k == 'metadata': + raise exc.InvalidRequestError( + "Attribute name 'metadata' is reserved " + "for the MetaData instance when using a " + "declarative base class." + ) + prop = clsregistry._deferred_relationship(cls, value) + our_stuff[k] = prop + + def _extract_declared_columns(self): + our_stuff = self.properties + + # set up attributes in the order they were created + our_stuff.sort(key=lambda key: our_stuff[key]._creation_order) + + # extract columns from the class dict + declared_columns = self.declared_columns + name_to_prop_key = collections.defaultdict(set) + for key, c in list(our_stuff.items()): + if isinstance(c, (ColumnProperty, CompositeProperty)): + for col in c.columns: + if isinstance(col, Column) and \ + col.table is None: + _undefer_column_name(key, col) + if not isinstance(c, CompositeProperty): + name_to_prop_key[col.name].add(key) + declared_columns.add(col) + elif isinstance(c, Column): + _undefer_column_name(key, c) + name_to_prop_key[c.name].add(key) + declared_columns.add(c) + # if the column is the same name as the key, + # remove it from the explicit properties dict. + # the normal rules for assigning column-based properties + # will take over, including precedence of columns + # in multi-column ColumnProperties. + if key == c.key: + del our_stuff[key] + + for name, keys in name_to_prop_key.items(): + if len(keys) > 1: + util.warn( + "On class %r, Column object %r named " + "directly multiple times, " + "only one will be used: %s. 
" + "Consider using orm.synonym instead" % + (self.classname, name, (", ".join(sorted(keys)))) + ) + + def _setup_table(self): + cls = self.cls + tablename = self.tablename + table_args = self.table_args + dict_ = self.dict_ + declared_columns = self.declared_columns - mapped_table = None - - def __init__(self, mapper_cls, - cls, - table, - inherits, - declared_columns, - column_copies, - properties, mapper_args_fn): - self.mapper_cls = mapper_cls - self.cls = cls + declared_columns = self.declared_columns = sorted( + declared_columns, key=lambda c: c._creation_order) + table = None + + if hasattr(cls, '__table_cls__'): + table_cls = util.unbound_method_to_callable(cls.__table_cls__) + else: + table_cls = Table + + if '__table__' not in dict_: + if tablename is not None: + + args, table_kw = (), {} + if table_args: + if isinstance(table_args, dict): + table_kw = table_args + elif isinstance(table_args, tuple): + if isinstance(table_args[-1], dict): + args, table_kw = table_args[0:-1], table_args[-1] + else: + args = table_args + + autoload = dict_.get('__autoload__') + if autoload: + table_kw['autoload'] = True + + cls.__table__ = table = table_cls( + tablename, cls.metadata, + *(tuple(declared_columns) + tuple(args)), + **table_kw) + else: + table = cls.__table__ + if declared_columns: + for c in declared_columns: + if not table.c.contains_column(c): + raise exc.ArgumentError( + "Can't add additional column %r when " + "specifying __table__" % c.key + ) self.local_table = table - self.inherits = inherits - self.properties = properties - self.mapper_args_fn = mapper_args_fn - self.declared_columns = declared_columns - self.column_copies = column_copies + def _setup_inheritance(self): + table = self.local_table + cls = self.cls + table_args = self.table_args + declared_columns = self.declared_columns + for c in cls.__bases__: + c = _resolve_for_abstract(c) + if c is None: + continue + if _declared_mapping_info(c) is not None and \ + not _get_immediate_cls_attr( + c, '_sa_decl_prepare_nocascade', strict=True): + self.inherits = c + break + else: + self.inherits = None + + if table is None and self.inherits is None and \ + not _get_immediate_cls_attr(cls, '__no_table__'): + + raise exc.InvalidRequestError( + "Class %r does not have a __table__ or __tablename__ " + "specified and does not inherit from an existing " + "table-mapped class." % cls + ) + elif self.inherits: + inherited_mapper = _declared_mapping_info(self.inherits) + inherited_table = inherited_mapper.local_table + inherited_mapped_table = inherited_mapper.mapped_table + + if table is None: + # single table inheritance. + # ensure no table args + if table_args: + raise exc.ArgumentError( + "Can't place __table_args__ on an inherited class " + "with no table." + ) + # add any columns declared here to the inherited table. + for c in declared_columns: + if c.primary_key: + raise exc.ArgumentError( + "Can't place primary key columns on an inherited " + "class with no table." 
+ ) + if c.name in inherited_table.c: + if inherited_table.c[c.name] is c: + continue + raise exc.ArgumentError( + "Column '%s' on class %s conflicts with " + "existing column '%s'" % + (c, cls, inherited_table.c[c.name]) + ) + inherited_table.append_column(c) + if inherited_mapped_table is not None and \ + inherited_mapped_table is not inherited_table: + inherited_mapped_table._refresh_for_new_column(c) def _prepare_mapper_arguments(self): properties = self.properties @@ -380,7 +495,7 @@ def _prepare_mapper_arguments(self): set([c.key for c in inherited_table.c if c not in inherited_mapper._columntoproperty]) exclude_properties.difference_update( - [c.key for c in self.declared_columns]) + [c.key for c in self.declared_columns]) # look through columns in the current mapper that # are keyed to a propname different than the colname @@ -399,19 +514,31 @@ def _prepare_mapper_arguments(self): properties[k] = [col] + p.columns result_mapper_args = mapper_args.copy() result_mapper_args['properties'] = properties - return result_mapper_args + self.mapper_args = result_mapper_args def map(self): - mapper_args = self._prepare_mapper_arguments() - self.cls.__mapper__ = self.mapper_cls( + self._prepare_mapper_arguments() + if hasattr(self.cls, '__mapper_cls__'): + mapper_cls = util.unbound_method_to_callable( + self.cls.__mapper_cls__) + else: + mapper_cls = mapper + + self.cls.__mapper__ = mp_ = mapper_cls( self.cls, self.local_table, - **mapper_args + **self.mapper_args ) + del self.cls._sa_declared_attr_reg + return mp_ + class _DeferredMapperConfig(_MapperConfig): _configs = util.OrderedDict() + def _early_mapping(self): + pass + @property def cls(self): return self._cls() @@ -429,32 +556,31 @@ def _remove_config_cls(cls, ref): def has_cls(cls, class_): # 2.6 fails on weakref if class_ is an old style class return isinstance(class_, type) and \ - weakref.ref(class_) in cls._configs + weakref.ref(class_) in cls._configs @classmethod def config_for_cls(cls, class_): return cls._configs[weakref.ref(class_)] - @classmethod def classes_for_base(cls, base_cls, sort=True): classes_for_base = [m for m in cls._configs.values() - if issubclass(m.cls, base_cls)] + if issubclass(m.cls, base_cls)] if not sort: return classes_for_base all_m_by_cls = dict( - (m.cls, m) - for m in classes_for_base - ) + (m.cls, m) + for m in classes_for_base + ) tuples = [] for m_cls in all_m_by_cls: tuples.extend( - (all_m_by_cls[base_cls], all_m_by_cls[m_cls]) - for base_cls in m_cls.__bases__ - if base_cls in all_m_by_cls - ) + (all_m_by_cls[base_cls], all_m_by_cls[m_cls]) + for base_cls in m_cls.__bases__ + if base_cls in all_m_by_cls + ) return list( topological.sort( tuples, @@ -464,7 +590,7 @@ def classes_for_base(cls, base_cls, sort=True): def map(self): self._configs.pop(self._cls, None) - super(_DeferredMapperConfig, self).map() + return super(_DeferredMapperConfig, self).map() def _add_attribute(cls, key, value): diff --git a/lib/sqlalchemy/ext/declarative/clsregistry.py b/lib/sqlalchemy/ext/declarative/clsregistry.py index 8b846746f5..0d62bd2a8a 100644 --- a/lib/sqlalchemy/ext/declarative/clsregistry.py +++ b/lib/sqlalchemy/ext/declarative/clsregistry.py @@ -1,5 +1,6 @@ # ext/declarative/clsregistry.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -10,7 +11,7 @@ """ from ...orm.properties import 
ColumnProperty, RelationshipProperty, \ - SynonymProperty + SynonymProperty from ...schema import _get_table_key from ...orm import class_mapper, interfaces from ... import util @@ -70,10 +71,12 @@ class _MultipleClassMarker(object): """ + __slots__ = 'on_remove', 'contents', '__weakref__' + def __init__(self, classes, on_remove=None): self.on_remove = on_remove self.contents = set([ - weakref.ref(item, self._remove_item) for item in classes]) + weakref.ref(item, self._remove_item) for item in classes]) _registries.add(self) def __iter__(self): @@ -102,7 +105,12 @@ def _remove_item(self, ref): self.on_remove() def add_item(self, item): - modules = set([cls().__module__ for cls in self.contents]) + # protect against class registration race condition against + # asynchronous garbage collection calling _remove_item, + # [ticket:3208] + modules = set([ + cls.__module__ for cls in + [ref() for ref in self.contents] if cls is not None]) if item.__module__ in modules: util.warn( "This declarative base already contains a class with the " @@ -120,6 +128,9 @@ class _ModuleMarker(object): _decl_class_registry. """ + + __slots__ = 'parent', 'name', 'contents', 'mod_ns', 'path', '__weakref__' + def __init__(self, name, parent): self.parent = parent self.name = name @@ -160,11 +171,13 @@ def add_class(self, name, cls): existing.add_item(cls) else: existing = self.contents[name] = \ - _MultipleClassMarker([cls], - on_remove=lambda: self._remove_item(name)) + _MultipleClassMarker([cls], + on_remove=lambda: self._remove_item(name)) class _ModNS(object): + __slots__ = '__parent', + def __init__(self, parent): self.__parent = parent @@ -181,10 +194,13 @@ def __getattr__(self, key): assert isinstance(value, _MultipleClassMarker) return value.attempt_get(self.__parent.path, key) raise AttributeError("Module %r has no mapped classes " - "registered under the name %r" % (self.__parent.name, key)) + "registered under the name %r" % ( + self.__parent.name, key)) class _GetColumns(object): + __slots__ = 'cls', + def __init__(self, cls): self.cls = cls @@ -193,8 +209,8 @@ def __getattr__(self, key): if mp: if key not in mp.all_orm_descriptors: raise exc.InvalidRequestError( - "Class %r does not have a mapped column named %r" - % (self.cls, key)) + "Class %r does not have a mapped column named %r" + % (self.cls, key)) desc = mp.all_orm_descriptors[key] if desc.extension_type is interfaces.NOT_EXTENSION: @@ -203,24 +219,26 @@ def __getattr__(self, key): key = prop.name elif not isinstance(prop, ColumnProperty): raise exc.InvalidRequestError( - "Property %r is not an instance of" - " ColumnProperty (i.e. does not correspond" - " directly to a Column)." % key) + "Property %r is not an instance of" + " ColumnProperty (i.e. does not correspond" + " directly to a Column)." 
% key) return getattr(self.cls, key) inspection._inspects(_GetColumns)( - lambda target: inspection.inspect(target.cls)) + lambda target: inspection.inspect(target.cls)) class _GetTable(object): + __slots__ = 'key', 'metadata' + def __init__(self, key, metadata): self.key = key self.metadata = metadata def __getattr__(self, key): return self.metadata.tables[ - _get_table_key(key, self.key) - ] + _get_table_key(key, self.key) + ] def _determine_container(key, value): @@ -247,7 +265,7 @@ def _access_cls(self, key): elif key in cls.metadata._schemas: return _GetTable(key, cls.metadata) elif '_sa_module_registry' in cls._decl_class_registry and \ - key in cls._decl_class_registry['_sa_module_registry']: + key in cls._decl_class_registry['_sa_module_registry']: registry = cls._decl_class_registry['_sa_module_registry'] return registry.resolve_attr(key) elif self._resolvers: @@ -303,7 +321,8 @@ def _deferred_relationship(cls, prop): key, kwargs = prop.backref for attr in ('primaryjoin', 'secondaryjoin', 'secondary', 'foreign_keys', 'remote_side', 'order_by'): - if attr in kwargs and isinstance(kwargs[attr], str): + if attr in kwargs and isinstance(kwargs[attr], + util.string_types): kwargs[attr] = resolve_arg(kwargs[attr]) return prop diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py index 8b3f968dc9..996e81fcab 100644 --- a/lib/sqlalchemy/ext/horizontal_shard.py +++ b/lib/sqlalchemy/ext/horizontal_shard.py @@ -1,5 +1,6 @@ # ext/horizontal_shard.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -43,10 +44,10 @@ def _execute_and_instances(self, context): def iter_for_shard(shard_id): context.attributes['shard_id'] = shard_id result = self._connection_from_session( - mapper=self._mapper_zero(), - shard_id=shard_id).execute( - context.statement, - self._params) + mapper=self._mapper_zero(), + shard_id=shard_id).execute( + context.statement, + self._params) return self.instances(result, context) if self._shard_id is not None: @@ -114,9 +115,11 @@ def connection(self, mapper=None, instance=None, shard_id=None, **kwargs): if self.transaction is not None: return self.transaction.connection(mapper, shard_id=shard_id) else: - return self.get_bind(mapper, - shard_id=shard_id, - instance=instance).contextual_connect(**kwargs) + return self.get_bind( + mapper, + shard_id=shard_id, + instance=instance + ).contextual_connect(**kwargs) def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw): diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index 576e0bd4e0..bbf386742a 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -1,5 +1,6 @@ # ext/hybrid.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -44,8 +45,8 @@ def length(self): return self.end - self.start @hybrid_method - def contains(self,point): - return (self.start <= point) & (point < self.end) + def contains(self, point): + return (self.start <= point) & (point <= self.end) @hybrid_method def intersects(self, other): @@ -144,7 +145,7 @@ def radius(cls): return func.abs(cls.length) / 2 Above the Python 
function ``abs()`` is used for instance-level -operations, the SQL function ``ABS()`` is used via the :attr:`.func` +operations, the SQL function ``ABS()`` is used via the :data:`.func` object for class-level expressions:: >>> i1.radius @@ -473,8 +474,8 @@ def word_insensitive(self): .. seealso:: `Hybrids and Value Agnostic Types - `_ - - on the techspot.zzzeek.org blog + `_ + - on the techspot.zzzeek.org blog `Value Agnostic Types, Part II `_ - @@ -633,10 +634,10 @@ def operate(self, op, other): from ..orm import attributes, interfaces HYBRID_METHOD = util.symbol('HYBRID_METHOD') -"""Symbol indicating an :class:`_InspectionAttr` that's +"""Symbol indicating an :class:`InspectionAttr` that's of type :class:`.hybrid_method`. - Is assigned to the :attr:`._InspectionAttr.extension_type` + Is assigned to the :attr:`.InspectionAttr.extension_type` attibute. .. seealso:: @@ -646,10 +647,10 @@ def operate(self, op, other): """ HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY') -"""Symbol indicating an :class:`_InspectionAttr` that's +"""Symbol indicating an :class:`InspectionAttr` that's of type :class:`.hybrid_method`. - Is assigned to the :attr:`._InspectionAttr.extension_type` + Is assigned to the :attr:`.InspectionAttr.extension_type` attibute. .. seealso:: @@ -658,7 +659,8 @@ def operate(self, op, other): """ -class hybrid_method(interfaces._InspectionAttr): + +class hybrid_method(interfaces.InspectionAttrInfo): """A decorator which allows definition of a Python object method with both instance-level and class-level behavior. @@ -701,7 +703,7 @@ def expression(self, expr): return self -class hybrid_property(interfaces._InspectionAttr): +class hybrid_property(interfaces.InspectionAttrInfo): """A decorator which allows definition of a Python descriptor with both instance-level and class-level behavior. 
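The hybrid behavior documented above is easiest to see with a minimal, self-contained sketch; the ``Interval`` mapping below reuses the ``start`` / ``end`` / ``length`` names from the hybrid documentation, while the table name and the rendered SQL shown in the comments are illustrative only::

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.ext.hybrid import hybrid_property

    Base = declarative_base()

    class Interval(Base):
        __tablename__ = 'interval'

        id = Column(Integer, primary_key=True)
        start = Column(Integer, nullable=False)
        end = Column(Integer, nullable=False)

        @hybrid_property
        def length(self):
            # plain Python subtraction on an instance; a SQL column
            # expression when accessed on the class
            return self.end - self.start

    i1 = Interval(start=5, end=15)
    print(i1.length)              # 10, evaluated in Python

    # at the class level the same function produces a SQL expression,
    # rendering roughly as: interval."end" - interval.start > :param_1
    print(Interval.length > 10)
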
@@ -779,7 +781,7 @@ def comparator(self, comparator): """ proxy_attr = attributes.\ - create_proxied_attribute(self) + create_proxied_attribute(self) def expr(owner): return proxy_attr(owner, self.__name__, self, comparator(owner)) diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py index 2cf36e9bd9..30a0ab7d73 100644 --- a/lib/sqlalchemy/ext/instrumentation.py +++ b/lib/sqlalchemy/ext/instrumentation.py @@ -105,7 +105,7 @@ def _locate_extended_factory(self, class_): def _check_conflicts(self, class_, factory): existing_factories = self._collect_management_factories_for(class_).\ - difference([factory]) + difference([factory]) if existing_factories: raise TypeError( "multiple instrumentation implementations specified " @@ -166,7 +166,13 @@ def unregister(self, class_): def manager_of_class(self, cls): if cls is None: return None - return self._manager_finders.get(cls, _default_manager_getter)(cls) + try: + finder = self._manager_finders.get(cls, _default_manager_getter) + except TypeError: + # due to weakref lookup on invalid object + return None + else: + return finder(cls) def state_of(self, instance): if instance is None: @@ -182,7 +188,7 @@ def dict_of(self, instance): orm_instrumentation._instrumentation_factory = \ - _instrumentation_factory = ExtendedInstrumentationRegistry() + _instrumentation_factory = ExtendedInstrumentationRegistry() orm_instrumentation.instrumentation_finders = instrumentation_finders @@ -316,7 +322,7 @@ def initialize_collection(self, key, state, factory): return delegate(key, state, factory) else: return ClassManager.initialize_collection(self, key, - state, factory) + state, factory) def new_instance(self, state=None): instance = self.class_.__new__(self.class_) @@ -392,6 +398,7 @@ def _reinstall_default_lookups(): manager_of_class=_default_manager_getter ) ) + _instrumentation_factory._extended = False def _install_lookups(lookups): diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index 7869e888cc..97f720cb4c 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -1,5 +1,6 @@ # ext/mutable.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -401,6 +402,27 @@ def coerce(cls, key, value): msg = "Attribute '%s' does not accept objects of type %s" raise ValueError(msg % (key, type(value))) + @classmethod + def _get_listen_keys(cls, attribute): + """Given a descriptor attribute, return a ``set()`` of the attribute + keys which indicate a change in the state of this attribute. + + This is normally just ``set([attribute.key])``, but can be overridden + to provide for additional keys. E.g. a :class:`.MutableComposite` + augments this set with the attribute keys associated with the columns + that comprise the composite value. + + This collection is consulted in the case of intercepting the + :meth:`.InstanceEvents.refresh` and + :meth:`.InstanceEvents.refresh_flush` events, which pass along a list + of attribute names that have been refreshed; the list is compared + against this set to determine if action needs to be taken. + + .. 
versionadded:: 1.0.5 + + """ + return set([attribute.key]) + @classmethod def _listen_on_attribute(cls, attribute, coerce, parent_cls): """Establish this type as a mutation listener for the given @@ -414,6 +436,8 @@ def _listen_on_attribute(cls, attribute, coerce, parent_cls): # rely on "propagate" here parent_cls = attribute.class_ + listen_keys = cls._get_listen_keys(attribute) + def load(state, *args): """Listen for objects loaded or refreshed. @@ -428,6 +452,10 @@ def load(state, *args): state.dict[key] = val val._parents[state.obj()] = key + def load_attrs(state, ctx, attrs): + if not attrs or listen_keys.intersection(attrs): + load(state) + def set(target, value, oldvalue, initiator): """Listen for set/replace events on the target data member. @@ -461,15 +489,17 @@ def unpickle(state, state_dict): val._parents[state.obj()] = key event.listen(parent_cls, 'load', load, - raw=True, propagate=True) - event.listen(parent_cls, 'refresh', load, - raw=True, propagate=True) + raw=True, propagate=True) + event.listen(parent_cls, 'refresh', load_attrs, + raw=True, propagate=True) + event.listen(parent_cls, 'refresh_flush', load_attrs, + raw=True, propagate=True) event.listen(attribute, 'set', set, - raw=True, retval=True, propagate=True) + raw=True, retval=True, propagate=True) event.listen(parent_cls, 'pickle', pickle, - raw=True, propagate=True) + raw=True, propagate=True) event.listen(parent_cls, 'unpickle', unpickle, - raw=True, propagate=True) + raw=True, propagate=True) class Mutable(MutableBase): @@ -564,7 +594,6 @@ def listen_for_type(mapper, class_): return sqltype - class MutableComposite(MutableBase): """Mixin that defines transparent propagation of change events on a SQLAlchemy "composite" object to its @@ -574,6 +603,10 @@ class MutableComposite(MutableBase): """ + @classmethod + def _get_listen_keys(cls, attribute): + return set([attribute.key]).union(attribute.property._attribute_keys) + def changed(self): """Subclasses should call this method whenever change events occur.""" @@ -581,16 +614,17 @@ def changed(self): prop = object_mapper(parent).get_property(key) for value, attr_name in zip( - self.__composite_values__(), - prop._attribute_keys): + self.__composite_values__(), + prop._attribute_keys): setattr(parent, attr_name, value) + def _setup_composite_listener(): def _listen_for_type(mapper, class_): for prop in mapper.iterate_properties: if (hasattr(prop, 'composite_class') and - isinstance(prop.composite_class, type) and - issubclass(prop.composite_class, MutableComposite)): + isinstance(prop.composite_class, type) and + issubclass(prop.composite_class, MutableComposite)): prop.composite_class._listen_on_attribute( getattr(class_, prop.key), False, class_) if not event.contains(Mapper, "mapper_configured", _listen_for_type): @@ -601,6 +635,18 @@ def _listen_for_type(mapper, class_): class MutableDict(Mutable, dict): """A dictionary type that implements :class:`.Mutable`. + The :class:`.MutableDict` object implements a dictionary that will + emit change events to the underlying mapping when the contents of + the dictionary are altered, including when values are added or removed. + + Note that :class:`.MutableDict` does **not** apply mutable tracking to the + *values themselves* inside the dictionary. Therefore it is not a sufficient + solution for the use case of tracking deep changes to a *recursive* + dictionary structure, such as a JSON structure. 
To support this use case, + build a subclass of :class:`.MutableDict` that provides appropriate + coersion to the values placed in the dictionary so that they too are + "mutable", and emit events up to their parent structure. + .. versionadded:: 0.8 """ @@ -610,21 +656,40 @@ def __setitem__(self, key, value): dict.__setitem__(self, key, value) self.changed() + def setdefault(self, key, value): + result = dict.setdefault(self, key, value) + self.changed() + return result + def __delitem__(self, key): """Detect dictionary del events and emit change events.""" dict.__delitem__(self, key) self.changed() + def update(self, *a, **kw): + dict.update(self, *a, **kw) + self.changed() + + def pop(self, *arg): + result = dict.pop(self, *arg) + self.changed() + return result + + def popitem(self): + result = dict.popitem(self) + self.changed() + return result + def clear(self): dict.clear(self) self.changed() @classmethod def coerce(cls, key, value): - """Convert plain dictionary to MutableDict.""" - if not isinstance(value, MutableDict): + """Convert plain dictionary to instance of this class.""" + if not isinstance(value, cls): if isinstance(value, dict): - return MutableDict(value) + return cls(value) return Mutable.coerce(key, value) else: return value diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py index c4ba6d571c..d060a4f03f 100644 --- a/lib/sqlalchemy/ext/orderinglist.py +++ b/lib/sqlalchemy/ext/orderinglist.py @@ -1,5 +1,6 @@ # ext/orderinglist.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -82,11 +83,11 @@ class Bullet(Base): s.bullets[2].position >>> 2 -The :class:`.OrderingList` construct only works with **changes** to a collection, -and not the initial load from the database, and requires that the list be -sorted when loaded. Therefore, be sure to -specify ``order_by`` on the :func:`.relationship` against the target ordering -attribute, so that the ordering is correct when first loaded. +The :class:`.OrderingList` construct only works with **changes** to a +collection, and not the initial load from the database, and requires that the +list be sorted when loaded. Therefore, be sure to specify ``order_by`` on the +:func:`.relationship` against the target ordering attribute, so that the +ordering is correct when first loaded. .. warning:: @@ -103,22 +104,22 @@ class Bullet(Base): SQLAlchemy's unit of work performs all INSERTs before DELETEs within a single flush. In the case of a primary key, it will trade an INSERT/DELETE of the same primary key for an UPDATE statement in order - to lessen the impact of this lmitation, however this does not take place + to lessen the impact of this limitation, however this does not take place for a UNIQUE column. A future feature will allow the "DELETE before INSERT" behavior to be possible, allevating this limitation, though this feature will require explicit configuration at the mapper level for sets of columns that are to be handled in this way. -:func:`.ordering_list` takes the name of the related object's ordering attribute as -an argument. By default, the zero-based integer index of the object's -position in the :func:`.ordering_list` is synchronized with the ordering attribute: -index 0 will get position 0, index 1 position 1, etc. 
To start numbering at 1 -or some other integer, provide ``count_from=1``. +:func:`.ordering_list` takes the name of the related object's ordering +attribute as an argument. By default, the zero-based integer index of the +object's position in the :func:`.ordering_list` is synchronized with the +ordering attribute: index 0 will get position 0, index 1 position 1, etc. To +start numbering at 1 or some other integer, provide ``count_from=1``. """ -from ..orm.collections import collection +from ..orm.collections import collection, collection_adapter from .. import util __all__ = ['ordering_list'] @@ -318,7 +319,10 @@ def insert(self, index, entity): def remove(self, entity): super(OrderingList, self).remove(entity) - self._reorder() + + adapter = collection_adapter(self) + if adapter and adapter._referenced_by_owner: + self._reorder() def pop(self, index=-1): entity = super(OrderingList, self).pop(index) @@ -358,7 +362,7 @@ def __reduce__(self): for func_name, func in list(locals().items()): if (util.callable(func) and func.__name__ == func_name and - not func.__doc__ and hasattr(list, func_name)): + not func.__doc__ and hasattr(list, func_name)): func.__doc__ = getattr(list, func_name).__doc__ del func_name, func diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py index 388cd40482..893f7be227 100644 --- a/lib/sqlalchemy/ext/serializer.py +++ b/lib/sqlalchemy/ext/serializer.py @@ -1,5 +1,6 @@ # ext/serializer.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -21,7 +22,8 @@ # ... define mappers - query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey) + query = Session.query(MyClass). 
+ filter(MyClass.somedata=='foo').order_by(MyClass.sortkey) # pickle the query serialized = dumps(query) @@ -69,7 +71,7 @@ def Serializer(*args, **kw): pickler = pickle.Pickler(*args, **kw) def persistent_id(obj): - #print "serializing:", repr(obj) + # print "serializing:", repr(obj) if isinstance(obj, QueryableAttribute): cls = obj.impl.class_ key = obj.impl.key @@ -78,11 +80,12 @@ def persistent_id(obj): id = "mapper:" + b64encode(pickle.dumps(obj.class_)) elif isinstance(obj, MapperProperty) and not obj.parent.non_primary: id = "mapperprop:" + b64encode(pickle.dumps(obj.parent.class_)) + \ - ":" + obj.key + ":" + obj.key elif isinstance(obj, Table): id = "table:" + text_type(obj.key) elif isinstance(obj, Column) and isinstance(obj.table, Table): - id = "column:" + text_type(obj.table.key) + ":" + text_type(obj.key) + id = "column:" + \ + text_type(obj.table.key) + ":" + text_type(obj.key) elif isinstance(obj, Session): id = "session:" elif isinstance(obj, Engine): @@ -95,7 +98,7 @@ def persistent_id(obj): return pickler our_ids = re.compile( - r'(mapperprop|mapper|table|column|session|attribute|engine):(.*)') + r'(mapperprop|mapper|table|column|session|attribute|engine):(.*)') def Deserializer(file, metadata=None, scoped_session=None, engine=None): diff --git a/lib/sqlalchemy/inspection.py b/lib/sqlalchemy/inspection.py index fe9e405558..5c16c45c92 100644 --- a/lib/sqlalchemy/inspection.py +++ b/lib/sqlalchemy/inspection.py @@ -1,5 +1,6 @@ # sqlalchemy/inspect.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -67,7 +68,7 @@ def inspect(subject, raiseerr=True): if raiseerr and ( reg is None or ret is None - ): + ): raise exc.NoInspectionAvailable( "No inspection system is " "available for object of type %s" % @@ -80,8 +81,8 @@ def decorate(fn_or_cls): for type_ in types: if type_ in _registrars: raise AssertionError( - "Type %s is already " - "registered" % type_) + "Type %s is already " + "registered" % type_) _registrars[type_] = fn_or_cls return fn_or_cls return decorate diff --git a/lib/sqlalchemy/interfaces.py b/lib/sqlalchemy/interfaces.py index ed50a6456b..464ad9f707 100644 --- a/lib/sqlalchemy/interfaces.py +++ b/lib/sqlalchemy/interfaces.py @@ -1,5 +1,6 @@ # sqlalchemy/interfaces.py -# Copyright (C) 2007-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2007-2016 the SQLAlchemy authors and contributors +# # Copyright (C) 2007 Jason Kirtland jek@discorporate.us # # This module is part of SQLAlchemy and is released under @@ -79,8 +80,9 @@ def _adapt_listener(cls, self, listener): """ - listener = util.as_interface(listener, methods=('connect', - 'first_connect', 'checkout', 'checkin')) + listener = util.as_interface(listener, + methods=('connect', 'first_connect', + 'checkout', 'checkin')) if hasattr(listener, 'connect'): event.listen(self, 'connect', listener.connect) if hasattr(listener, 'first_connect'): @@ -205,7 +207,7 @@ def execute_wrapper( statement, parameters, context, - ): + ): return statement, parameters return listener.cursor_execute( diff --git a/lib/sqlalchemy/log.py b/lib/sqlalchemy/log.py index 935761d5f5..b23de9014f 100644 --- a/lib/sqlalchemy/log.py +++ b/lib/sqlalchemy/log.py @@ -1,5 +1,6 @@ # sqlalchemy/log.py -# Copyright (C) 2006-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2006-2016 the SQLAlchemy authors and 
contributors +# # Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk # # This module is part of SQLAlchemy and is released under @@ -21,7 +22,7 @@ import sys # set initial level to WARN. This so that -# log statements don't occur in the absense of explicit +# log statements don't occur in the absence of explicit # logging being enabled for 'sqlalchemy'. rootlogger = logging.getLogger('sqlalchemy') if rootlogger.level == logging.NOTSET: @@ -46,6 +47,7 @@ def class_logger(cls): _logged_classes.add(cls) return cls + class Identified(object): logging_name = None @@ -90,7 +92,7 @@ def __init__(self, echo, name): # if echo flag is enabled and no handlers, # add a handler to the list if self._echo_map[echo] <= logging.INFO \ - and not self.logger.handlers: + and not self.logger.handlers: _add_default_handler(self.logger) # @@ -173,10 +175,11 @@ def instance_logger(instance, echoflag=None): if instance.logging_name: name = "%s.%s.%s" % (instance.__class__.__module__, - instance.__class__.__name__, instance.logging_name) + instance.__class__.__name__, + instance.logging_name) else: name = "%s.%s" % (instance.__class__.__module__, - instance.__class__.__name__) + instance.__class__.__name__) instance._echo = echoflag diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py index 7825a70acf..7425737cef 100644 --- a/lib/sqlalchemy/orm/__init__.py +++ b/lib/sqlalchemy/orm/__init__.py @@ -1,5 +1,6 @@ # orm/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -14,40 +15,40 @@ from . import exc from .mapper import ( - Mapper, - _mapper_registry, - class_mapper, - configure_mappers, - reconstructor, - validates - ) + Mapper, + _mapper_registry, + class_mapper, + configure_mappers, + reconstructor, + validates +) from .interfaces import ( - EXT_CONTINUE, - EXT_STOP, - PropComparator, - ) + EXT_CONTINUE, + EXT_STOP, + PropComparator, +) from .deprecated_interfaces import ( - MapperExtension, - SessionExtension, - AttributeExtension, + MapperExtension, + SessionExtension, + AttributeExtension, ) from .util import ( - aliased, - join, - object_mapper, - outerjoin, - polymorphic_union, - was_deleted, - with_parent, - with_polymorphic, - ) + aliased, + join, + object_mapper, + outerjoin, + polymorphic_union, + was_deleted, + with_parent, + with_polymorphic, +) from .properties import ColumnProperty from .relationships import RelationshipProperty from .descriptor_props import ( - ComparableProperty, - CompositeProperty, - SynonymProperty, - ) + ComparableProperty, + CompositeProperty, + SynonymProperty, +) from .relationships import ( foreign, remote, @@ -56,7 +57,8 @@ Session, object_session, sessionmaker, - make_transient + make_transient, + make_transient_to_detached ) from .scoping import ( scoped_session @@ -67,6 +69,7 @@ from .. import util as _sa_util from . import strategies as _strategies + def create_session(bind=None, **kwargs): """Create a new :class:`.Session` with no automation enabled by default. 
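# A minimal sketch of the logging behaviour described in lib/sqlalchemy/log.py
# above: the root 'sqlalchemy' logger defaults to WARN, so SQL logging only
# appears when the echo flag is used or the 'sqlalchemy.engine' logger is
# raised to INFO.  The in-memory SQLite URL is only an assumption for the
# illustration, not something taken from this patch.
import logging

from sqlalchemy import create_engine

# 1) echo flag: log.py attaches a default handler for this engine's logger
engine = create_engine("sqlite://", echo=True)

# 2) standard logging: enable INFO explicitly instead of using echo
logging.basicConfig()
logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)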
@@ -105,6 +108,7 @@ def create_session(bind=None, **kwargs): relationship = public_factory(RelationshipProperty, ".orm.relationship") + def relation(*arg, **kw): """A synonym for :func:`relationship`.""" @@ -142,9 +146,15 @@ def backref(name, **kwargs): Used with the ``backref`` keyword argument to :func:`relationship` in place of a string argument, e.g.:: - 'items':relationship(SomeItem, backref=backref('parent', lazy='subquery')) + 'items':relationship( + SomeItem, backref=backref('parent', lazy='subquery')) + + .. seealso:: + + :ref:`relationships_backref` """ + return (name, kwargs) @@ -156,7 +166,8 @@ def deferred(*columns, **kw): :class:`.Column` object, however a collection is supported in order to support multiple columns mapped under the same attribute. - :param \**kw: additional keyword arguments passed to :class:`.ColumnProperty`. + :param \**kw: additional keyword arguments passed to + :class:`.ColumnProperty`. .. seealso:: @@ -171,11 +182,11 @@ def deferred(*columns, **kw): synonym = public_factory(SynonymProperty, ".orm.synonym") comparable_property = public_factory(ComparableProperty, - ".orm.comparable_property") + ".orm.comparable_property") @_sa_util.deprecated("0.7", message=":func:`.compile_mappers` " - "is renamed to :func:`.configure_mappers`") + "is renamed to :func:`.configure_mappers`") def compile_mappers(): """Initialize the inter-mapper relationships of all mappers that have been defined. @@ -194,14 +205,14 @@ def clear_mappers(): :func:`.clear_mappers` is *not* for normal use, as there is literally no valid usage for it outside of very specific testing scenarios. Normally, mappers are permanent structural components of user-defined classes, and - are never discarded independently of their class. If a mapped class itself - is garbage collected, its mapper is automatically disposed of as well. As - such, :func:`.clear_mappers` is only for usage in test suites that re-use - the same classes with different mappings, which is itself an extremely rare - use case - the only such use case is in fact SQLAlchemy's own test suite, - and possibly the test suites of other ORM extension libraries which - intend to test various combinations of mapper construction upon a fixed - set of classes. + are never discarded independently of their class. If a mapped class + itself is garbage collected, its mapper is automatically disposed of as + well. As such, :func:`.clear_mappers` is only for usage in test suites + that re-use the same classes with different mappings, which is itself an + extremely rare use case - the only such use case is in fact SQLAlchemy's + own test suite, and possibly the test suites of other ORM extension + libraries which intend to test various combinations of mapper construction + upon a fixed set of classes. """ mapperlib._CONFIGURE_MUTEX.acquire() @@ -235,6 +246,7 @@ def clear_mappers(): from .strategy_options import Load + def eagerload(*args, **kwargs): """A synonym for :func:`joinedload()`.""" return joinedload(*args, **kwargs) @@ -245,12 +257,9 @@ def eagerload_all(*args, **kwargs): return joinedload_all(*args, **kwargs) - - contains_alias = public_factory(AliasOption, ".orm.contains_alias") - def __go(lcls): global __all__ from .. 
import util as sa_util @@ -259,9 +268,8 @@ def __go(lcls): import inspect as _inspect __all__ = sorted(name for name, obj in lcls.items() - if not (name.startswith('_') or _inspect.ismodule(obj))) + if not (name.startswith('_') or _inspect.ismodule(obj))) _sa_util.dependencies.resolve_all("sqlalchemy.orm") __go(locals()) - diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index 3a786c73d4..16b326439e 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -1,5 +1,6 @@ # orm/attributes.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -20,16 +21,17 @@ from .base import instance_state, instance_dict, manager_of_class from .base import PASSIVE_NO_RESULT, ATTR_WAS_SET, ATTR_EMPTY, NO_VALUE,\ - NEVER_SET, NO_CHANGE, CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK,\ - INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED, PASSIVE_OFF,\ - PASSIVE_RETURN_NEVER_SET, PASSIVE_NO_INITIALIZE, PASSIVE_NO_FETCH,\ - PASSIVE_NO_FETCH_RELATED, PASSIVE_ONLY_PERSISTENT, NO_AUTOFLUSH + NEVER_SET, NO_CHANGE, CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK,\ + INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED, PASSIVE_OFF,\ + PASSIVE_RETURN_NEVER_SET, PASSIVE_NO_INITIALIZE, PASSIVE_NO_FETCH,\ + PASSIVE_NO_FETCH_RELATED, PASSIVE_ONLY_PERSISTENT, NO_AUTOFLUSH from .base import state_str, instance_str + @inspection._self_inspects class QueryableAttribute(interfaces._MappedAttribute, - interfaces._InspectionAttr, - interfaces.PropComparator): + interfaces.InspectionAttr, + interfaces.PropComparator): """Base class for :term:`descriptor` objects that intercept attribute events on behalf of a :class:`.MapperProperty` object. The actual :class:`.MapperProperty` is accessible @@ -51,8 +53,8 @@ class QueryableAttribute(interfaces._MappedAttribute, is_attribute = True def __init__(self, class_, key, impl=None, - comparator=None, parententity=None, - of_type=None): + comparator=None, parententity=None, + of_type=None): self.class_ = class_ self.key = key self.impl = impl @@ -75,13 +77,12 @@ def _supports_population(self): def get_history(self, instance, passive=PASSIVE_OFF): return self.impl.get_history(instance_state(instance), - instance_dict(instance), passive) + instance_dict(instance), passive) def __selectable__(self): # TODO: conditionally attach this method based on clause_element ? return self - @util.memoized_property def info(self): """Return the 'info' dictionary for the underlying SQL element. @@ -96,22 +97,23 @@ def info(self): * If the attribute is a :class:`.ColumnProperty` but is mapped to any other kind of SQL expression other than a :class:`.Column`, - the attribute will refer to the :attr:`.MapperProperty.info` dictionary - associated directly with the :class:`.ColumnProperty`, assuming the SQL - expression itself does not have it's own ``.info`` attribute - (which should be the case, unless a user-defined SQL construct - has defined one). - - * If the attribute refers to any other kind of :class:`.MapperProperty`, - including :class:`.RelationshipProperty`, the attribute will refer - to the :attr:`.MapperProperty.info` dictionary associated with - that :class:`.MapperProperty`. 
- - * To access the :attr:`.MapperProperty.info` dictionary of the :class:`.MapperProperty` - unconditionally, including for a :class:`.ColumnProperty` that's - associated directly with a :class:`.schema.Column`, the attribute - can be referred to using :attr:`.QueryableAttribute.property` - attribute, as ``MyClass.someattribute.property.info``. + the attribute will refer to the :attr:`.MapperProperty.info` + dictionary associated directly with the :class:`.ColumnProperty`, + assuming the SQL expression itself does not have its own ``.info`` + attribute (which should be the case, unless a user-defined SQL + construct has defined one). + + * If the attribute refers to any other kind of + :class:`.MapperProperty`, including :class:`.RelationshipProperty`, + the attribute will refer to the :attr:`.MapperProperty.info` + dictionary associated with that :class:`.MapperProperty`. + + * To access the :attr:`.MapperProperty.info` dictionary of the + :class:`.MapperProperty` unconditionally, including for a + :class:`.ColumnProperty` that's associated directly with a + :class:`.schema.Column`, the attribute can be referred to using + :attr:`.QueryableAttribute.property` attribute, as + ``MyClass.someattribute.property.info``. .. versionadded:: 0.8.0 @@ -151,18 +153,20 @@ def _query_clause_element(self): def adapt_to_entity(self, adapt_to_entity): assert not self._of_type - return self.__class__(adapt_to_entity.entity, self.key, impl=self.impl, - comparator=self.comparator.adapt_to_entity(adapt_to_entity), - parententity=adapt_to_entity) + return self.__class__(adapt_to_entity.entity, + self.key, impl=self.impl, + comparator=self.comparator.adapt_to_entity( + adapt_to_entity), + parententity=adapt_to_entity) def of_type(self, cls): return QueryableAttribute( - self.class_, - self.key, - self.impl, - self.comparator.of_type(cls), - self._parententity, - of_type=cls) + self.class_, + self.key, + self.impl, + self.comparator.of_type(cls), + self._parententity, + of_type=cls) def label(self, name): return self._query_clause_element().label(name) @@ -181,8 +185,8 @@ def __getattr__(self, key): return getattr(self.comparator, key) except AttributeError: raise AttributeError( - 'Neither %r object nor %r object associated with %s ' - 'has an attribute %r' % ( + 'Neither %r object nor %r object associated with %s ' + 'has an attribute %r' % ( type(self).__name__, type(self.comparator).__name__, self, @@ -217,7 +221,7 @@ class InstrumentedAttribute(QueryableAttribute): def __set__(self, instance, value): self.impl.set(instance_state(instance), - instance_dict(instance), value, None) + instance_dict(instance), value, None) def __delete__(self, instance): self.impl.delete(instance_state(instance), instance_dict(instance)) @@ -251,9 +255,9 @@ class Proxy(QueryableAttribute): """ def __init__(self, class_, key, descriptor, - comparator, - adapt_to_entity=None, doc=None, - original_property=None): + comparator, + adapt_to_entity=None, doc=None, + original_property=None): self.class_ = class_ self.key = key self.descriptor = descriptor @@ -272,13 +276,15 @@ def comparator(self): self._comparator = self._comparator() if self._adapt_to_entity: self._comparator = self._comparator.adapt_to_entity( - self._adapt_to_entity) + self._adapt_to_entity) return self._comparator def adapt_to_entity(self, adapt_to_entity): - return self.__class__(adapt_to_entity.entity, self.key, self.descriptor, - self._comparator, - adapt_to_entity) + return self.__class__(adapt_to_entity.entity, + self.key, + self.descriptor, + 
self._comparator, + adapt_to_entity) def __get__(self, instance, owner): if instance is None: @@ -302,10 +308,10 @@ def __getattr__(self, attribute): raise AttributeError( 'Neither %r object nor %r object associated with %s ' 'has an attribute %r' % ( - type(descriptor).__name__, - type(self.comparator).__name__, - self, - attribute) + type(descriptor).__name__, + type(self.comparator).__name__, + self, + attribute) ) Proxy.__name__ = type(descriptor).__name__ + 'Proxy' @@ -319,6 +325,7 @@ def __getattr__(self, attribute): OP_APPEND = util.symbol("APPEND") OP_REPLACE = util.symbol("REPLACE") + class Event(object): """A token propagated throughout the course of a chain of attribute events. @@ -338,23 +345,25 @@ class Event(object): .. versionadded:: 0.9.0 - """ - - impl = None - """The :class:`.AttributeImpl` which is the current event initiator. - """ + :var impl: The :class:`.AttributeImpl` which is the current event + initiator. - op = None - """The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE` or :attr:`.OP_REPLACE`, - indicating the source operation. + :var op: The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE` or + :attr:`.OP_REPLACE`, indicating the source operation. """ + __slots__ = 'impl', 'op', 'parent_token' + def __init__(self, attribute_impl, op): self.impl = attribute_impl self.op = op self.parent_token = self.impl.parent_token + def __eq__(self, other): + return isinstance(other, Event) and \ + other.impl is self.impl and \ + other.op == self.op @property def key(self): @@ -363,15 +372,16 @@ def key(self): def hasparent(self, state): return self.impl.hasparent(state) + class AttributeImpl(object): """internal implementation for instrumented attributes.""" def __init__(self, class_, key, - callable_, dispatch, trackparent=False, extension=None, - compare_function=None, active_history=False, - parent_token=None, expire_missing=True, - send_modified_events=True, - **kwargs): + callable_, dispatch, trackparent=False, extension=None, + compare_function=None, active_history=False, + parent_token=None, expire_missing=True, + send_modified_events=True, + **kwargs): """Construct an AttributeImpl. \class_ @@ -415,8 +425,8 @@ def __init__(self, class_, key, for this key. send_modified_events - if False, the InstanceState._modified_event method will have no effect; - this means the attribute will never show up as changed in a + if False, the InstanceState._modified_event method will have no + effect; this means the attribute will never show up as changed in a history entry. 
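# A short, self-contained sketch of the attribute event chain described above:
# the "initiator" passed to listeners is the Event token (.impl / .op).  The
# User model is an assumed example, not part of this patch.
from sqlalchemy import Column, Integer, String, event
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = "user"
    id = Column(Integer, primary_key=True)
    name = Column(String)

@event.listens_for(User.name, "set", retval=True, active_history=True)
def on_set(target, value, oldvalue, initiator):
    # initiator.op is OP_REPLACE for scalar "set" events
    print("set", oldvalue, "->", value, initiator.op)
    return value

u = User()
u.name = "alice"   # fires on_set; oldvalue is a sentinel symbol on first assignment
u.name = "bob"     # fires again, now with the prior value "alice"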
""" self.class_ = class_ @@ -443,6 +453,11 @@ def __init__(self, class_, key, self.expire_missing = expire_missing + __slots__ = ( + 'class_', 'key', 'callable_', 'dispatch', 'trackparent', + 'parent_token', 'send_modified_events', 'is_equal', 'expire_missing' + ) + def __str__(self): return "%s.%s" % (self.class_.__name__, self.key) @@ -476,7 +491,7 @@ def hasparent(self, state, optimistic=False): assert self.trackparent, msg return state.parents.get(id(self.parent_token), optimistic) \ - is not False + is not False def sethasparent(self, state, parent_state, value): """Set a boolean flag on the given item corresponding to @@ -495,7 +510,7 @@ def sethasparent(self, state, parent_state, value): last_parent = state.parents[id_] if last_parent is not False and \ - last_parent.key != parent_state.key: + last_parent.key != parent_state.key: if last_parent.obj() is None: raise orm_exc.StaleDataError( @@ -505,34 +520,17 @@ def sethasparent(self, state, parent_state, value): "has gone stale, can't be sure this " "is the most recent parent." % (state_str(state), - state_str(parent_state), - self.key)) + state_str(parent_state), + self.key)) return state.parents[id_] = False - def set_callable(self, state, callable_): - """Set a callable function for this attribute on the given object. - - This callable will be executed when the attribute is next - accessed, and is assumed to construct part of the instances - previously stored state. When its value or values are loaded, - they will be established as part of the instance's *committed - state*. While *trackparent* information will be assembled for - these instances, attribute-level event handlers will not be - fired. - - The callable overrides the class level callable set in the - ``InstrumentedAttribute`` constructor. - - """ - state.callables[self.key] = callable_ - def get_history(self, state, dict_, passive=PASSIVE_OFF): raise NotImplementedError() - def get_all_pending(self, state, dict_): + def get_all_pending(self, state, dict_, passive=PASSIVE_NO_INITIALIZE): """Return a list of tuples of (state, obj) for all objects in this attribute's current state + history. @@ -553,7 +551,11 @@ def get_all_pending(self, state, dict_): def initialize(self, state, dict_): """Initialize the given state's attribute with an empty value.""" - dict_[self.key] = None + # As of 1.0, we don't actually set a value in + # dict_. This is so that the state of the object does not get + # modified without emitting the appropriate events. + + return None def get(self, state, dict_, passive=PASSIVE_OFF): @@ -568,11 +570,13 @@ def get(self, state, dict_, passive=PASSIVE_OFF): # if history present, don't load key = self.key if key not in state.committed_state or \ - state.committed_state[key] is NEVER_SET: + state.committed_state[key] is NEVER_SET: if not passive & CALLABLES_OK: return PASSIVE_NO_RESULT - if key in state.callables: + if key in state.expired_attributes: + value = state._load_expired(state, passive) + elif key in state.callables: callable_ = state.callables[key] value = callable_(state, passive) elif self.callable_: @@ -588,9 +592,9 @@ def get(self, state, dict_, passive=PASSIVE_OFF): except KeyError: # TODO: no test coverage here. 
raise KeyError( - "Deferred loader for attribute " - "%r failed to populate " - "correctly" % key) + "Deferred loader for attribute " + "%r failed to populate " + "correctly" % key) elif value is not ATTR_EMPTY: return self.set_committed_value(state, dict_, value) @@ -605,14 +609,14 @@ def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF): def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF): self.set(state, dict_, None, initiator, - passive=passive, check_old=value) + passive=passive, check_old=value) def pop(self, state, dict_, value, initiator, passive=PASSIVE_OFF): self.set(state, dict_, None, initiator, - passive=passive, check_old=value, pop=True) + passive=passive, check_old=value, pop=True) def set(self, state, dict_, value, initiator, - passive=PASSIVE_OFF, check_old=None, pop=False): + passive=PASSIVE_OFF, check_old=None, pop=False): raise NotImplementedError() def get_committed_value(self, state, dict_, passive=PASSIVE_OFF): @@ -620,7 +624,7 @@ def get_committed_value(self, state, dict_, passive=PASSIVE_OFF): if self.key in state.committed_state: value = state.committed_state[self.key] - if value is NO_VALUE: + if value in (NO_VALUE, NEVER_SET): return None else: return value @@ -643,6 +647,23 @@ class ScalarAttributeImpl(AttributeImpl): supports_population = True collection = False + __slots__ = '_replace_token', '_append_token', '_remove_token' + + def __init__(self, *arg, **kw): + super(ScalarAttributeImpl, self).__init__(*arg, **kw) + self._replace_token = self._append_token = None + self._remove_token = None + + def _init_append_token(self): + self._replace_token = self._append_token = Event(self, OP_REPLACE) + return self._replace_token + + _init_append_or_replace_token = _init_append_token + + def _init_remove_token(self): + self._remove_token = Event(self, OP_REMOVE) + return self._remove_token + def delete(self, state, dict_): # TODO: catch key errors, convert to attributeerror? 
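# A rough, self-contained sketch of the expired-attribute load path handled in
# AttributeImpl.get() above: expiring discards the value from the instance
# dict, and the next access re-loads it through the loader-callable branch.
# The User model and in-memory SQLite engine are assumed for illustration only.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session

Base = declarative_base()

class User(Base):
    __tablename__ = "user"
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = Session(engine)
session.add(User(name="alice"))
session.commit()

user = session.query(User).first()
session.expire(user, ["name"])
assert "name" not in user.__dict__   # value discarded from the instance dict
print(user.name)                     # next access re-loads it from the database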
@@ -669,7 +690,7 @@ def get_history(self, state, dict_, passive=PASSIVE_OFF): return History.from_scalar_attribute(self, state, current) def set(self, state, dict_, value, initiator, - passive=PASSIVE_OFF, check_old=None, pop=False): + passive=PASSIVE_OFF, check_old=None, pop=False): if self.dispatch._active_history: old = self.get(state, dict_, PASSIVE_RETURN_NEVER_SET) else: @@ -677,30 +698,22 @@ def set(self, state, dict_, value, initiator, if self.dispatch.set: value = self.fire_replace_event(state, dict_, - value, old, initiator) + value, old, initiator) state._modified_event(dict_, self, old) dict_[self.key] = value - @util.memoized_property - def _replace_token(self): - return Event(self, OP_REPLACE) - - @util.memoized_property - def _append_token(self): - return Event(self, OP_REPLACE) - - @util.memoized_property - def _remove_token(self): - return Event(self, OP_REMOVE) - def fire_replace_event(self, state, dict_, value, previous, initiator): for fn in self.dispatch.set: - value = fn(state, value, previous, initiator or self._replace_token) + value = fn( + state, value, previous, + initiator or self._replace_token or + self._init_append_or_replace_token()) return value def fire_remove_event(self, state, dict_, value, initiator): for fn in self.dispatch.remove: - fn(state, value, initiator or self._remove_token) + fn(state, value, + initiator or self._remove_token or self._init_remove_token()) @property def type(self): @@ -720,9 +733,13 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl): supports_population = True collection = False + __slots__ = () + def delete(self, state, dict_): old = self.get(state, dict_) - self.fire_remove_event(state, dict_, old, self._remove_token) + self.fire_remove_event( + state, dict_, old, + self._remove_token or self._init_remove_token()) del dict_[self.key] def get_history(self, state, dict_, passive=PASSIVE_OFF): @@ -737,68 +754,79 @@ def get_history(self, state, dict_, passive=PASSIVE_OFF): else: return History.from_object_attribute(self, state, current) - def get_all_pending(self, state, dict_): + def get_all_pending(self, state, dict_, passive=PASSIVE_NO_INITIALIZE): if self.key in dict_: current = dict_[self.key] - if current is not None: - ret = [(instance_state(current), current)] - else: - ret = [(None, None)] + elif passive & CALLABLES_OK: + current = self.get(state, dict_, passive=passive) + else: + return [] + + # can't use __hash__(), can't use __eq__() here + if current is not None and \ + current is not PASSIVE_NO_RESULT and \ + current is not NEVER_SET: + ret = [(instance_state(current), current)] + else: + ret = [(None, None)] - if self.key in state.committed_state: - original = state.committed_state[self.key] - if original not in (NEVER_SET, PASSIVE_NO_RESULT, None) and \ + if self.key in state.committed_state: + original = state.committed_state[self.key] + if original is not None and \ + original is not PASSIVE_NO_RESULT and \ + original is not NEVER_SET and \ original is not current: - ret.append((instance_state(original), original)) - return ret - else: - return [] + ret.append((instance_state(original), original)) + return ret def set(self, state, dict_, value, initiator, - passive=PASSIVE_OFF, check_old=None, pop=False): + passive=PASSIVE_OFF, check_old=None, pop=False): """Set a value on the given InstanceState. 
""" if self.dispatch._active_history: - old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT | NO_AUTOFLUSH) + old = self.get( + state, dict_, passive=PASSIVE_ONLY_PERSISTENT | NO_AUTOFLUSH) else: - old = self.get(state, dict_, passive=PASSIVE_NO_FETCH) + old = self.get(state, dict_, passive=PASSIVE_NO_FETCH ^ INIT_OK) if check_old is not None and \ - old is not PASSIVE_NO_RESULT and \ - check_old is not old: + old is not PASSIVE_NO_RESULT and \ + check_old is not old: if pop: return else: raise ValueError( "Object %s not associated with %s on attribute '%s'" % ( - instance_str(check_old), - state_str(state), - self.key - )) + instance_str(check_old), + state_str(state), + self.key + )) + value = self.fire_replace_event(state, dict_, value, old, initiator) dict_[self.key] = value - def fire_remove_event(self, state, dict_, value, initiator): if self.trackparent and value is not None: self.sethasparent(instance_state(value), state, False) for fn in self.dispatch.remove: - fn(state, value, initiator or self._remove_token) + fn(state, value, initiator or + self._remove_token or self._init_remove_token()) state._modified_event(dict_, self, value) def fire_replace_event(self, state, dict_, value, previous, initiator): if self.trackparent: if (previous is not value and - previous is not None and - previous is not PASSIVE_NO_RESULT): + previous not in (None, PASSIVE_NO_RESULT, NEVER_SET)): self.sethasparent(instance_state(previous), state, False) for fn in self.dispatch.set: - value = fn(state, value, previous, initiator or self._replace_token) + value = fn( + state, value, previous, initiator or + self._replace_token or self._init_append_or_replace_token()) state._modified_event(dict_, self, previous) @@ -825,22 +853,44 @@ class CollectionAttributeImpl(AttributeImpl): supports_population = True collection = True + __slots__ = 'copy', 'collection_factory', '_append_token', '_remove_token' + def __init__(self, class_, key, callable_, dispatch, - typecallable=None, trackparent=False, extension=None, - copy_function=None, compare_function=None, **kwargs): + typecallable=None, trackparent=False, extension=None, + copy_function=None, compare_function=None, **kwargs): super(CollectionAttributeImpl, self).__init__( - class_, - key, - callable_, dispatch, - trackparent=trackparent, - extension=extension, - compare_function=compare_function, - **kwargs) + class_, + key, + callable_, dispatch, + trackparent=trackparent, + extension=extension, + compare_function=compare_function, + **kwargs) if copy_function is None: copy_function = self.__copy self.copy = copy_function self.collection_factory = typecallable + self._append_token = None + self._remove_token = None + + if getattr(self.collection_factory, "_sa_linker", None): + + @event.listens_for(self, "init_collection") + def link(target, collection, collection_adapter): + collection._sa_linker(collection_adapter) + + @event.listens_for(self, "dispose_collection") + def unlink(target, collection, collection_adapter): + collection._sa_linker(None) + + def _init_append_token(self): + self._append_token = Event(self, OP_APPEND) + return self._append_token + + def _init_remove_token(self): + self._remove_token = Event(self, OP_REMOVE) + return self._remove_token def __copy(self, item): return [y for y in collections.collection_adapter(item)] @@ -852,7 +902,9 @@ def get_history(self, state, dict_, passive=PASSIVE_OFF): else: return History.from_collection(self, state, current) - def get_all_pending(self, state, dict_): + def get_all_pending(self, state, 
dict_, passive=PASSIVE_NO_INITIALIZE): + # NOTE: passive is ignored here at the moment + if self.key not in dict_: return [] @@ -863,11 +915,11 @@ def get_all_pending(self, state, dict_): original = state.committed_state[self.key] if original not in (NO_VALUE, NEVER_SET): current_states = [((c is not None) and - instance_state(c) or None, c) - for c in current] + instance_state(c) or None, c) + for c in current] original_states = [((c is not None) and instance_state(c) or None, c) - for c in original] + for c in original] current_set = dict(current_states) original_set = dict(original_states) @@ -882,17 +934,11 @@ def get_all_pending(self, state, dict_): return [(instance_state(o), o) for o in current] - @util.memoized_property - def _append_token(self): - return Event(self, OP_APPEND) - - @util.memoized_property - def _remove_token(self): - return Event(self, OP_REMOVE) - def fire_append_event(self, state, dict_, value, initiator): for fn in self.dispatch.append: - value = fn(state, value, initiator or self._append_token) + value = fn( + state, value, + initiator or self._append_token or self._init_append_token()) state._modified_event(dict_, self, NEVER_SET, True) @@ -909,7 +955,8 @@ def fire_remove_event(self, state, dict_, value, initiator): self.sethasparent(instance_state(value), state, False) for fn in self.dispatch.remove: - fn(state, value, initiator or self._remove_token) + fn(state, value, + initiator or self._remove_token or self._init_remove_token()) state._modified_event(dict_, self, NEVER_SET, True) @@ -932,15 +979,20 @@ def initialize(self, state, dict_): return user_data def _initialize_collection(self, state): - return state.manager.initialize_collection( + + adapter, collection = state.manager.initialize_collection( self.key, state, self.collection_factory) + self.dispatch.init_collection(state, collection, adapter) + + return adapter, collection + def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF): collection = self.get_collection(state, dict_, passive=passive) if collection is PASSIVE_NO_RESULT: value = self.fire_append_event(state, dict_, value, initiator) assert self.key not in dict_, \ - "Collection was loaded during event handling." + "Collection was loaded during event handling." state._get_pending_mutation(self.key).append(value) else: collection.append_with_event(value, initiator) @@ -950,7 +1002,7 @@ def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF): if collection is PASSIVE_NO_RESULT: self.fire_remove_event(state, dict_, value, initiator) assert self.key not in dict_, \ - "Collection was loaded during event handling." + "Collection was loaded during event handling." state._get_pending_mutation(self.key).remove(value) else: collection.remove_with_event(value, initiator) @@ -965,7 +1017,7 @@ def pop(self, state, dict_, value, initiator, passive=PASSIVE_OFF): pass def set(self, state, dict_, value, initiator, - passive=PASSIVE_OFF, pop=False): + passive=PASSIVE_OFF, pop=False): """Set a value on the given object. 
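# A sketch of the collection lifecycle events around this point: init_collection
# is dispatched above when a collection is first created, and dispose_collection
# in the bulk-replace path below.  The Parent/Child mapping is an assumed
# example, not part of this patch.
from sqlalchemy import Column, ForeignKey, Integer, event
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship

Base = declarative_base()

class Parent(Base):
    __tablename__ = "parent"
    id = Column(Integer, primary_key=True)
    children = relationship("Child")

class Child(Base):
    __tablename__ = "child"
    id = Column(Integer, primary_key=True)
    parent_id = Column(Integer, ForeignKey("parent.id"))

@event.listens_for(Parent.children, "init_collection")
def on_init(target, collection, collection_adapter):
    print("collection initialized on", target)

@event.listens_for(Parent.children, "dispose_collection")
def on_dispose(target, collection, collection_adapter):
    print("old collection discarded on", target)

p = Parent()
p.children.append(Child())   # first access creates the collection: init_collection fires
p.children = [Child()]       # bulk replace discards the old list: dispose_collection fires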
""" @@ -1003,12 +1055,14 @@ def _set_iterable(self, state, dict_, iterable, adapter=None): # place a copy of "old" in state.committed_state state._modified_event(dict_, self, old, True) - old_collection = getattr(old, '_sa_adapter') + old_collection = old._sa_adapter dict_[self.key] = user_data collections.bulk_replace(new_values, old_collection, new_collection) - old_collection.unlink(old) + + del old._sa_adapter + self.dispatch.dispose_collection(state, old, old_collection) def _invalidate_collection(self, collection): adapter = getattr(collection, '_sa_adapter') @@ -1042,7 +1096,7 @@ def set_committed_value(self, state, dict_, value): return user_data def get_collection(self, state, dict_, - user_data=None, passive=PASSIVE_OFF): + user_data=None, passive=PASSIVE_OFF): """Retrieve the CollectionAdapter associated with the given state. Creates a new CollectionAdapter if one does not exist. @@ -1080,24 +1134,27 @@ def _acceptable_key_err(child_state, initiator, child_impl): def emit_backref_from_scalar_set_event(state, child, oldchild, initiator): if oldchild is child: return child - if oldchild is not None and oldchild is not PASSIVE_NO_RESULT: + if oldchild is not None and \ + oldchild is not PASSIVE_NO_RESULT and \ + oldchild is not NEVER_SET: # With lazy=None, there's no guarantee that the full collection is # present when updating via a backref. old_state, old_dict = instance_state(oldchild),\ - instance_dict(oldchild) + instance_dict(oldchild) impl = old_state.manager[key].impl if initiator.impl is not impl or \ initiator.op not in (OP_REPLACE, OP_REMOVE): impl.pop(old_state, - old_dict, - state.obj(), - parent_impl._append_token, - passive=PASSIVE_NO_FETCH) + old_dict, + state.obj(), + parent_impl._append_token or + parent_impl._init_append_token(), + passive=PASSIVE_NO_FETCH) if child is not None: child_state, child_dict = instance_state(child),\ - instance_dict(child) + instance_dict(child) child_impl = child_state.manager[key].impl if initiator.parent_token is not parent_token and \ initiator.parent_token is not child_impl.parent_token: @@ -1105,11 +1162,11 @@ def emit_backref_from_scalar_set_event(state, child, oldchild, initiator): elif initiator.impl is not child_impl or \ initiator.op not in (OP_APPEND, OP_REPLACE): child_impl.append( - child_state, - child_dict, - state.obj(), - initiator, - passive=PASSIVE_NO_FETCH) + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH) return child def emit_backref_from_collection_append_event(state, child, initiator): @@ -1117,7 +1174,7 @@ def emit_backref_from_collection_append_event(state, child, initiator): return child_state, child_dict = instance_state(child), \ - instance_dict(child) + instance_dict(child) child_impl = child_state.manager[key].impl if initiator.parent_token is not parent_token and \ @@ -1126,49 +1183,49 @@ def emit_backref_from_collection_append_event(state, child, initiator): elif initiator.impl is not child_impl or \ initiator.op not in (OP_APPEND, OP_REPLACE): child_impl.append( - child_state, - child_dict, - state.obj(), - initiator, - passive=PASSIVE_NO_FETCH) + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH) return child def emit_backref_from_collection_remove_event(state, child, initiator): if child is not None: child_state, child_dict = instance_state(child),\ - instance_dict(child) + instance_dict(child) child_impl = child_state.manager[key].impl if initiator.impl is not child_impl or \ initiator.op not in (OP_REMOVE, OP_REPLACE): 
child_impl.pop( - child_state, - child_dict, - state.obj(), - initiator, - passive=PASSIVE_NO_FETCH) + child_state, + child_dict, + state.obj(), + initiator, + passive=PASSIVE_NO_FETCH) if uselist: event.listen(attribute, "append", - emit_backref_from_collection_append_event, - retval=True, raw=True) + emit_backref_from_collection_append_event, + retval=True, raw=True) else: event.listen(attribute, "set", - emit_backref_from_scalar_set_event, - retval=True, raw=True) + emit_backref_from_scalar_set_event, + retval=True, raw=True) # TODO: need coverage in test/orm/ of remove event event.listen(attribute, "remove", - emit_backref_from_collection_remove_event, - retval=True, raw=True) + emit_backref_from_collection_remove_event, + retval=True, raw=True) _NO_HISTORY = util.symbol('NO_HISTORY') _NO_STATE_SYMBOLS = frozenset([ - id(PASSIVE_NO_RESULT), - id(NO_VALUE), - id(NEVER_SET)]) + id(PASSIVE_NO_RESULT), + id(NO_VALUE), + id(NEVER_SET)]) History = util.namedtuple("History", [ - "added", "unchanged", "deleted" - ]) + "added", "unchanged", "deleted" +]) class History(History): @@ -1207,28 +1264,28 @@ def empty(self): """ return not bool( - (self.added or self.deleted) - or self.unchanged and self.unchanged != [None] - ) + (self.added or self.deleted) + or self.unchanged + ) def sum(self): """Return a collection of added + unchanged + deleted.""" return (self.added or []) +\ - (self.unchanged or []) +\ - (self.deleted or []) + (self.unchanged or []) +\ + (self.deleted or []) def non_deleted(self): """Return a collection of added + unchanged.""" return (self.added or []) +\ - (self.unchanged or []) + (self.unchanged or []) def non_added(self): """Return a collection of unchanged + deleted.""" return (self.unchanged or []) +\ - (self.deleted or []) + (self.deleted or []) def has_changes(self): """Return True if this :class:`.History` has changes.""" @@ -1246,7 +1303,7 @@ def as_state(self): [(c is not None) and instance_state(c) or None for c in self.deleted], - ) + ) @classmethod def from_scalar_attribute(cls, attribute, state, current): @@ -1316,13 +1373,13 @@ def from_collection(cls, attribute, state, current): else: current_states = [((c is not None) and instance_state(c) - or None, c) - for c in current - ] + or None, c) + for c in current + ] original_states = [((c is not None) and instance_state(c) or None, c) - for c in original - ] + for c in original + ] current_set = dict(current_states) original_set = dict(original_states) @@ -1354,11 +1411,11 @@ def get_history(obj, key, passive=PASSIVE_OFF): """ if passive is True: util.warn_deprecated("Passing True for 'passive' is deprecated. " - "Use attributes.PASSIVE_NO_INITIALIZE") + "Use attributes.PASSIVE_NO_INITIALIZE") passive = PASSIVE_NO_INITIALIZE elif passive is False: util.warn_deprecated("Passing False for 'passive' is " - "deprecated. Use attributes.PASSIVE_OFF") + "deprecated. 
Use attributes.PASSIVE_OFF") passive = PASSIVE_OFF return get_state_history(instance_state(obj), key, passive) @@ -1380,15 +1437,15 @@ def register_attribute(class_, key, **kw): parententity = kw.pop('parententity', None) doc = kw.pop('doc', None) desc = register_descriptor(class_, key, - comparator, parententity, doc=doc) + comparator, parententity, doc=doc) register_attribute_impl(class_, key, **kw) return desc def register_attribute_impl(class_, key, - uselist=False, callable_=None, - useobject=False, - impl_class=None, backref=None, **kw): + uselist=False, callable_=None, + useobject=False, + impl_class=None, backref=None, **kw): manager = manager_of_class(class_) if uselist: @@ -1407,7 +1464,7 @@ def register_attribute_impl(class_, key, typecallable=typecallable, **kw) elif useobject: impl = ScalarObjectAttributeImpl(class_, key, callable_, - dispatch, **kw) + dispatch, **kw) else: impl = ScalarAttributeImpl(class_, key, callable_, dispatch, **kw) @@ -1421,11 +1478,11 @@ def register_attribute_impl(class_, key, def register_descriptor(class_, key, comparator=None, - parententity=None, doc=None): + parententity=None, doc=None): manager = manager_of_class(class_) descriptor = InstrumentedAttribute(class_, key, comparator=comparator, - parententity=parententity) + parententity=parententity) descriptor.__doc__ = doc diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py index e973de8972..8d86fb24e2 100644 --- a/lib/sqlalchemy/orm/base.py +++ b/lib/sqlalchemy/orm/base.py @@ -1,5 +1,6 @@ # orm/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -13,102 +14,127 @@ from . import exc import operator -PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT', -"""Symbol returned by a loader callable or other attribute/history -retrieval operation when a value could not be determined, based -on loader callable flags. -""" +PASSIVE_NO_RESULT = util.symbol( + 'PASSIVE_NO_RESULT', + """Symbol returned by a loader callable or other attribute/history + retrieval operation when a value could not be determined, based + on loader callable flags. + """ ) -ATTR_WAS_SET = util.symbol('ATTR_WAS_SET', -"""Symbol returned by a loader callable to indicate the -retrieved value, or values, were assigned to their attributes -on the target object. -""") +ATTR_WAS_SET = util.symbol( + 'ATTR_WAS_SET', + """Symbol returned by a loader callable to indicate the + retrieved value, or values, were assigned to their attributes + on the target object. + """ +) -ATTR_EMPTY = util.symbol('ATTR_EMPTY', -"""Symbol used internally to indicate an attribute had no callable. -""") +ATTR_EMPTY = util.symbol( + 'ATTR_EMPTY', + """Symbol used internally to indicate an attribute had no callable.""" +) -NO_VALUE = util.symbol('NO_VALUE', -"""Symbol which may be placed as the 'previous' value of an attribute, -indicating no value was loaded for an attribute when it was modified, -and flags indicated we were not to load it. -""" +NO_VALUE = util.symbol( + 'NO_VALUE', + """Symbol which may be placed as the 'previous' value of an attribute, + indicating no value was loaded for an attribute when it was modified, + and flags indicated we were not to load it. 
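# A self-contained sketch of the History tuple returned by get_history() above;
# the User model and SQLite engine are assumed purely for illustration.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy.orm.attributes import get_history

Base = declarative_base()

class User(Base):
    __tablename__ = "user"
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = Session(engine)
session.add(User(name="alice"))
session.commit()

user = session.query(User).first()
user.name = "bob"

hist = get_history(user, "name")
print(hist.added)          # contains 'bob'
print(hist.deleted)        # contains the committed value 'alice'
print(hist.has_changes())  # True
# the same data is also reachable as inspect(user).attrs.name.history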
+ """ ) -NEVER_SET = util.symbol('NEVER_SET', -"""Symbol which may be placed as the 'previous' value of an attribute -indicating that the attribute had not been assigned to previously. -""" +NEVER_SET = util.symbol( + 'NEVER_SET', + """Symbol which may be placed as the 'previous' value of an attribute + indicating that the attribute had not been assigned to previously. + """ ) -NO_CHANGE = util.symbol("NO_CHANGE", -"""No callables or SQL should be emitted on attribute access -and no state should change""", canonical=0 +NO_CHANGE = util.symbol( + "NO_CHANGE", + """No callables or SQL should be emitted on attribute access + and no state should change + """, canonical=0 ) -CALLABLES_OK = util.symbol("CALLABLES_OK", -"""Loader callables can be fired off if a value -is not present.""", canonical=1 +CALLABLES_OK = util.symbol( + "CALLABLES_OK", + """Loader callables can be fired off if a value + is not present. + """, canonical=1 ) -SQL_OK = util.symbol("SQL_OK", -"""Loader callables can emit SQL at least on scalar value -attributes.""", canonical=2) +SQL_OK = util.symbol( + "SQL_OK", + """Loader callables can emit SQL at least on scalar value attributes.""", + canonical=2 +) -RELATED_OBJECT_OK = util.symbol("RELATED_OBJECT_OK", -"""callables can use SQL to load related objects as well -as scalar value attributes. -""", canonical=4 +RELATED_OBJECT_OK = util.symbol( + "RELATED_OBJECT_OK", + """Callables can use SQL to load related objects as well + as scalar value attributes. + """, canonical=4 ) -INIT_OK = util.symbol("INIT_OK", -"""Attributes should be initialized with a blank -value (None or an empty collection) upon get, if no other -value can be obtained. -""", canonical=8 +INIT_OK = util.symbol( + "INIT_OK", + """Attributes should be initialized with a blank + value (None or an empty collection) upon get, if no other + value can be obtained. + """, canonical=8 ) -NON_PERSISTENT_OK = util.symbol("NON_PERSISTENT_OK", -"""callables can be emitted if the parent is not persistent.""", -canonical=16 +NON_PERSISTENT_OK = util.symbol( + "NON_PERSISTENT_OK", + """Callables can be emitted if the parent is not persistent.""", + canonical=16 ) -LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED", -"""callables should use committed values as primary/foreign keys during a load -""", canonical=32 +LOAD_AGAINST_COMMITTED = util.symbol( + "LOAD_AGAINST_COMMITTED", + """Callables should use committed values as primary/foreign keys during a + load. + """, canonical=32 ) -NO_AUTOFLUSH = util.symbol("NO_AUTOFLUSH", -"""loader callables should disable autoflush. 
-""", canonical=64) +NO_AUTOFLUSH = util.symbol( + "NO_AUTOFLUSH", + """Loader callables should disable autoflush.""", + canonical=64 +) # pre-packaged sets of flags used as inputs -PASSIVE_OFF = util.symbol("PASSIVE_OFF", +PASSIVE_OFF = util.symbol( + "PASSIVE_OFF", "Callables can be emitted in all cases.", canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK | - INIT_OK | CALLABLES_OK | SQL_OK) + INIT_OK | CALLABLES_OK | SQL_OK) ) -PASSIVE_RETURN_NEVER_SET = util.symbol("PASSIVE_RETURN_NEVER_SET", - """PASSIVE_OFF ^ INIT_OK""", - canonical=PASSIVE_OFF ^ INIT_OK +PASSIVE_RETURN_NEVER_SET = util.symbol( + "PASSIVE_RETURN_NEVER_SET", + """PASSIVE_OFF ^ INIT_OK""", + canonical=PASSIVE_OFF ^ INIT_OK ) -PASSIVE_NO_INITIALIZE = util.symbol("PASSIVE_NO_INITIALIZE", - "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK", - canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK +PASSIVE_NO_INITIALIZE = util.symbol( + "PASSIVE_NO_INITIALIZE", + "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK", + canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK ) -PASSIVE_NO_FETCH = util.symbol("PASSIVE_NO_FETCH", - "PASSIVE_OFF ^ SQL_OK", - canonical=PASSIVE_OFF ^ SQL_OK +PASSIVE_NO_FETCH = util.symbol( + "PASSIVE_NO_FETCH", + "PASSIVE_OFF ^ SQL_OK", + canonical=PASSIVE_OFF ^ SQL_OK ) -PASSIVE_NO_FETCH_RELATED = util.symbol("PASSIVE_NO_FETCH_RELATED", - "PASSIVE_OFF ^ RELATED_OBJECT_OK", - canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK +PASSIVE_NO_FETCH_RELATED = util.symbol( + "PASSIVE_NO_FETCH_RELATED", + "PASSIVE_OFF ^ RELATED_OBJECT_OK", + canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK ) -PASSIVE_ONLY_PERSISTENT = util.symbol("PASSIVE_ONLY_PERSISTENT", - "PASSIVE_OFF ^ NON_PERSISTENT_OK", - canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK +PASSIVE_ONLY_PERSISTENT = util.symbol( + "PASSIVE_ONLY_PERSISTENT", + "PASSIVE_OFF ^ NON_PERSISTENT_OK", + canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK ) DEFAULT_MANAGER_ATTR = '_sa_class_manager' @@ -118,40 +144,50 @@ EXT_CONTINUE = util.symbol('EXT_CONTINUE') EXT_STOP = util.symbol('EXT_STOP') -ONETOMANY = util.symbol('ONETOMANY', -"""Indicates the one-to-many direction for a :func:`.relationship`. +ONETOMANY = util.symbol( + 'ONETOMANY', + """Indicates the one-to-many direction for a :func:`.relationship`. + + This symbol is typically used by the internals but may be exposed within + certain API features. -This symbol is typically used by the internals but may be exposed within -certain API features. + """) -""") +MANYTOONE = util.symbol( + 'MANYTOONE', + """Indicates the many-to-one direction for a :func:`.relationship`. -MANYTOONE = util.symbol('MANYTOONE', -"""Indicates the many-to-one direction for a :func:`.relationship`. + This symbol is typically used by the internals but may be exposed within + certain API features. -This symbol is typically used by the internals but may be exposed within -certain API features. + """) -""") +MANYTOMANY = util.symbol( + 'MANYTOMANY', + """Indicates the many-to-many direction for a :func:`.relationship`. -MANYTOMANY = util.symbol('MANYTOMANY', -"""Indicates the many-to-many direction for a :func:`.relationship`. + This symbol is typically used by the internals but may be exposed within + certain API features. -This symbol is typically used by the internals but may be exposed within -certain API features. + """) -""") +NOT_EXTENSION = util.symbol( + 'NOT_EXTENSION', + """Symbol indicating an :class:`InspectionAttr` that's + not part of sqlalchemy.ext. 
-NOT_EXTENSION = util.symbol('NOT_EXTENSION', -"""Symbol indicating an :class:`_InspectionAttr` that's - not part of sqlalchemy.ext. + Is assigned to the :attr:`.InspectionAttr.extension_type` + attibute. - Is assigned to the :attr:`._InspectionAttr.extension_type` - attibute. + """) -""") +_never_set = frozenset([NEVER_SET]) -_none_set = frozenset([None]) +_none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT]) + +_SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED") + +_DEFER_FOR_STATE = util.symbol("DEFER_FOR_STATE") def _generative(*assertions): @@ -176,11 +212,13 @@ def manager_of_class(cls): instance_dict = operator.attrgetter('__dict__') + def instance_str(instance): """Return a string describing an instance.""" return state_str(instance_state(instance)) + def state_str(state): """Return a string describing an instance via its InstanceState.""" @@ -189,8 +227,11 @@ def state_str(state): else: return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj())) + def state_class_str(state): - """Return a string describing an instance's class via its InstanceState.""" + """Return a string describing an instance's class via its + InstanceState. + """ if state is None: return "None" @@ -205,6 +246,7 @@ def attribute_str(instance, attribute): def state_attribute_str(state, attribute): return state_str(state) + "." + attribute + def object_mapper(instance): """Given an object, return the primary Mapper associated with the object instance. @@ -260,7 +302,6 @@ def _inspect_mapped_object(instance): return None - def _class_to_mapper(class_or_mapper): insp = inspection.inspect(class_or_mapper, False) if insp is not None: @@ -271,7 +312,8 @@ def _class_to_mapper(class_or_mapper): def _mapper_or_none(entity): """Return the :class:`.Mapper` for the given class or None if the - class is not mapped.""" + class is not mapped. + """ insp = inspection.inspect(entity, False) if insp is not None: @@ -282,16 +324,17 @@ class is not mapped.""" def _is_mapped_class(entity): """Return True if the given object is a mapped class, - :class:`.Mapper`, or :class:`.AliasedClass`.""" + :class:`.Mapper`, or :class:`.AliasedClass`. 
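# A brief sketch of the mapper-lookup helpers around this point (object_mapper
# above, class_mapper just below); the User class is an assumed minimal mapping.
from sqlalchemy import Column, Integer, inspect
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import class_mapper, object_mapper

Base = declarative_base()

class User(Base):
    __tablename__ = "user"
    id = Column(Integer, primary_key=True)

mapper = class_mapper(User)             # primary Mapper; configures mappers if needed
assert inspect(User) is mapper          # the inspection system returns the same Mapper
assert object_mapper(User()) is mapper  # lookup via an instance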
+ """ insp = inspection.inspect(entity, False) return insp is not None and \ - hasattr(insp, "mapper") and \ + not insp.is_clause_element and \ ( - insp.is_mapper - or insp.is_aliased_class + insp.is_mapper or insp.is_aliased_class ) + def _attr_as_key(attr): if hasattr(attr, 'key'): return attr.key @@ -299,16 +342,14 @@ def _attr_as_key(attr): return expression._column_as_key(attr) - def _orm_columns(entity): insp = inspection.inspect(entity, False) - if hasattr(insp, 'selectable'): + if hasattr(insp, 'selectable') and hasattr(insp.selectable, 'c'): return [c for c in insp.selectable.c] else: return [entity] - def _is_aliased_class(entity): insp = inspection.inspect(entity, False) return insp is not None and \ @@ -338,12 +379,13 @@ def _entity_descriptor(entity, key): return getattr(entity, key) except AttributeError: raise sa_exc.InvalidRequestError( - "Entity '%s' has no property '%s'" % - (description, key) - ) + "Entity '%s' has no property '%s'" % + (description, key) + ) _state_mapper = util.dottedgetter('manager.mapper') + @inspection._inspects(type) def _inspect_mapped_class(class_, configure=False): try: @@ -351,12 +393,13 @@ def _inspect_mapped_class(class_, configure=False): if not class_manager.is_mapped: return None mapper = class_manager.mapper + except exc.NO_STATE: + return None + else: if configure and mapper._new_mappers: mapper._configure_all() return mapper - except exc.NO_STATE: - return None def class_mapper(class_, configure=True): """Given a class, return the primary :class:`.Mapper` associated @@ -379,13 +422,13 @@ def class_mapper(class_, configure=True): if mapper is None: if not isinstance(class_, type): raise sa_exc.ArgumentError( - "Class object expected, got '%r'." % (class_, )) + "Class object expected, got '%r'." % (class_, )) raise exc.UnmappedClassError(class_) else: return mapper -class _InspectionAttr(object): +class InspectionAttr(object): """A base class applied to all ORM objects that can be returned by the :func:`.inspect` function. @@ -399,6 +442,7 @@ class _InspectionAttr(object): here intact for forwards-compatibility. """ + __slots__ = () is_selectable = False """Return True if this object is an instance of :class:`.Selectable`.""" @@ -422,7 +466,7 @@ class _InspectionAttr(object): :class:`.QueryableAttribute` which handles attributes events on behalf of a :class:`.MapperProperty`. But can also be an extension type such as :class:`.AssociationProxy` or :class:`.hybrid_property`. - The :attr:`._InspectionAttr.extension_type` will refer to a constant + The :attr:`.InspectionAttr.extension_type` will refer to a constant identifying the specific subtype. .. seealso:: @@ -450,8 +494,47 @@ class _InspectionAttr(object): """ + +class InspectionAttrInfo(InspectionAttr): + """Adds the ``.info`` attribute to :class:`.InspectionAttr`. + + The rationale for :class:`.InspectionAttr` vs. :class:`.InspectionAttrInfo` + is that the former is compatible as a mixin for classes that specify + ``__slots__``; this is essentially an implementation artifact. + + """ + + @util.memoized_property + def info(self): + """Info dictionary associated with the object, allowing user-defined + data to be associated with this :class:`.InspectionAttr`. + + The dictionary is generated when first accessed. Alternatively, + it can be specified as a constructor argument to the + :func:`.column_property`, :func:`.relationship`, or :func:`.composite` + functions. + + .. versionadded:: 0.8 Added support for .info to all + :class:`.MapperProperty` subclasses. + + .. 
versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also + available on extension types via the + :attr:`.InspectionAttrInfo.info` attribute, so that it can apply + to a wider variety of ORM and extension constructs. + + .. seealso:: + + :attr:`.QueryableAttribute.info` + + :attr:`.SchemaItem.info` + + """ + return {} + + class _MappedAttribute(object): """Mixin for attributes which should be replaced by mapper-assigned attributes. """ + __slots__ = () diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py index 87e351b6ca..f3c609f154 100644 --- a/lib/sqlalchemy/orm/collections.py +++ b/lib/sqlalchemy/orm/collections.py @@ -1,5 +1,6 @@ # orm/collections.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -110,6 +111,7 @@ def shift(self): from .. import util, exc as sa_exc from . import base +from sqlalchemy.util.compat import inspect_getargspec __all__ = ['collection', 'collection_adapter', 'mapped_collection', 'column_mapped_collection', @@ -127,6 +129,7 @@ class _PlainColumnGetter(object): and some rare caveats. """ + def __init__(self, cols): self.cols = cols self.composite = len(cols) > 1 @@ -158,6 +161,7 @@ class _SerializableColumnGetter(object): Remains here for pickle compatibility with 0.7.6. """ + def __init__(self, colkeys): self.colkeys = colkeys self.composite = len(colkeys) > 1 @@ -169,9 +173,9 @@ def __call__(self, value): state = base.instance_state(value) m = base._state_mapper(state) key = [m._get_state_attr_by_column( - state, state.dict, - m.mapped_table.columns[k]) - for k in self.colkeys] + state, state.dict, + m.mapped_table.columns[k]) + for k in self.colkeys] if self.composite: return tuple(key) else: @@ -212,8 +216,8 @@ def _cols(self, mapper): metadata = getattr(mapper.local_table, 'metadata', None) for (ckey, tkey) in self.colkeys: if tkey is None or \ - metadata is None or \ - tkey not in metadata: + metadata is None or \ + tkey not in metadata: cols.append(mapper.local_table.c[ckey]) else: cols.append(metadata.tables[tkey].c[ckey]) @@ -234,7 +238,7 @@ def column_mapped_collection(mapping_spec): """ cols = [expression._only_column_elements(q, "mapping_spec") - for q in util.to_list(mapping_spec) + for q in util.to_list(mapping_spec) ] keyfunc = _PlainColumnGetter(cols) return lambda: MappedCollection(keyfunc) @@ -426,6 +430,10 @@ def linker(fn): the instance. A single argument is passed: the collection adapter that has been linked, or None if unlinking. + .. deprecated:: 1.0.0 - the :meth:`.collection.linker` handler + is superseded by the :meth:`.AttributeEvents.init_collection` + and :meth:`.AttributeEvents.dispose_collection` handlers. + """ fn._sa_instrument_role = 'linker' return fn @@ -445,7 +453,7 @@ def converter(fn): The converter method will receive the object being assigned and should return an iterable of values suitable for use by the ``appender`` method. A converter must not assign values or mutate the collection, - it's sole job is to adapt the value the user provides into an iterable + its sole job is to adapt the value the user provides into an iterable of values for the ORM's use. The default converter implementation will use duck-typing to do the @@ -533,9 +541,9 @@ def decorator(fn): def removes_return(): """Mark the method as removing an entity in the collection. 
- Adds "remove from collection" handling to the method. The return value - of the method, if any, is considered the value to remove. The method - arguments are not inspected:: + Adds "remove from collection" handling to the method. The return + value of the method, if any, is considered the value to remove. The + method arguments are not inspected:: @collection.removes_return() def pop(self): ... @@ -572,7 +580,7 @@ def __init__(self, attr, owner_state, data): self._key = attr.key self._data = weakref.ref(data) self.owner_state = owner_state - self.link_to_self(data) + data._sa_adapter = self def _warn_invalidated(self): util.warn("This collection has been invalidated.") @@ -582,24 +590,19 @@ def data(self): "The entity collection being adapted." return self._data() - @util.memoized_property - def attr(self): - return self.owner_state.manager[self._key].impl - - def link_to_self(self, data): - """Link a collection to this adapter""" - - data._sa_adapter = self - if data._sa_linker: - data._sa_linker(self) + @property + def _referenced_by_owner(self): + """return True if the owner state still refers to this collection. + This will return False within a bulk replace operation, + where this collection is the one being replaced. - def unlink(self, data): - """Unlink a collection from any adapter""" + """ + return self.owner_state.dict[self._key] is self._data() - del data._sa_adapter - if data._sa_linker: - data._sa_linker(None) + @util.memoized_property + def attr(self): + return self.owner_state.manager[self._key].impl def adapt_like_to_iterable(self, obj): """Converts collection-compatible objects to an iterable of values. @@ -631,7 +634,7 @@ def adapt_like_to_iterable(self, obj): raise TypeError( "Incompatible collection type: %s is not %s-like" % ( - given, wanted)) + given, wanted)) # If the object is an adapted collection, return the (iterable) # adapter. @@ -709,9 +712,9 @@ def fire_append_event(self, item, initiator=None): if self.invalidated: self._warn_invalidated() return self.attr.fire_append_event( - self.owner_state, - self.owner_state.dict, - item, initiator) + self.owner_state, + self.owner_state.dict, + item, initiator) else: return item @@ -727,9 +730,9 @@ def fire_remove_event(self, item, initiator=None): if self.invalidated: self._warn_invalidated() self.attr.fire_remove_event( - self.owner_state, - self.owner_state.dict, - item, initiator) + self.owner_state, + self.owner_state.dict, + item, initiator) def fire_pre_remove_event(self, initiator=None): """Notify that an entity is about to be removed from the collection. @@ -741,9 +744,9 @@ def fire_pre_remove_event(self, initiator=None): if self.invalidated: self._warn_invalidated() self.attr.fire_pre_remove_event( - self.owner_state, - self.owner_state.dict, - initiator=initiator) + self.owner_state, + self.owner_state.dict, + initiator=initiator) def __getstate__(self): return {'key': self._key, @@ -847,6 +850,7 @@ def wrapper(): return wrapper + def _instrument_class(cls): """Modify methods in a class and install instrumentation.""" @@ -858,11 +862,24 @@ def _instrument_class(cls): "Can not instrument a built-in type. Use a " "subclass, even a trivial one.") + roles, methods = _locate_roles_and_methods(cls) + + _setup_canned_roles(cls, roles, methods) + + _assert_required_roles(cls, roles, methods) + + _set_collection_attributes(cls, roles, methods) + + +def _locate_roles_and_methods(cls): + """search for _sa_instrument_role-decorated methods in + method resolution order, assign to roles. 
+ + """ + roles = {} methods = {} - # search for _sa_instrument_role-decorated methods in - # method resolution order, assign to roles for supercls in cls.__mro__: for name, method in vars(supercls).items(): if not util.callable(method): @@ -887,14 +904,19 @@ def _instrument_class(cls): assert op in ('fire_append_event', 'fire_remove_event') after = op if before: - methods[name] = before[0], before[1], after + methods[name] = before + (after, ) elif after: methods[name] = None, None, after + return roles, methods + + +def _setup_canned_roles(cls, roles, methods): + """see if this class has "canned" roles based on a known + collection type (dict, set, list). Apply those roles + as needed to the "roles" dictionary, and also + prepare "decorator" methods - # see if this class has "canned" roles based on a known - # collection type (dict, set, list). Apply those roles - # as needed to the "roles" dictionary, and also - # prepare "decorator" methods + """ collection_type = util.duck_type_collection(cls) if collection_type in __interfaces: canned_roles, decorators = __interfaces[collection_type] @@ -905,11 +927,15 @@ def _instrument_class(cls): for method, decorator in decorators.items(): fn = getattr(cls, method, None) if (fn and method not in methods and - not hasattr(fn, '_sa_instrumented')): + not hasattr(fn, '_sa_instrumented')): setattr(cls, method, decorator(fn)) - # ensure all roles are present, and apply implicit instrumentation if - # needed + +def _assert_required_roles(cls, roles, methods): + """ensure all roles are present, and apply implicit instrumentation if + needed + + """ if 'appender' not in roles or not hasattr(cls, roles['appender']): raise sa_exc.ArgumentError( "Type %s must elect an appender method to be " @@ -931,8 +957,12 @@ def _instrument_class(cls): "Type %s must elect an iterator method to be " "a collection class" % cls.__name__) - # apply ad-hoc instrumentation from decorators, class-level defaults - # and implicit role declarations + +def _set_collection_attributes(cls, roles, methods): + """apply ad-hoc instrumentation from decorators, class-level defaults + and implicit role declarations + + """ for method_name, (before, argument, after) in methods.items(): setattr(cls, method_name, _instrument_membership_mutator(getattr(cls, method_name), @@ -942,19 +972,19 @@ def _instrument_class(cls): setattr(cls, '_sa_%s' % role, getattr(cls, method_name)) cls._sa_adapter = None - if not hasattr(cls, '_sa_linker'): - cls._sa_linker = None + if not hasattr(cls, '_sa_converter'): cls._sa_converter = None cls._sa_instrumented = id(cls) def _instrument_membership_mutator(method, before, argument, after): - """Route method args and/or return value through the collection adapter.""" + """Route method args and/or return value through the collection + adapter.""" # This isn't smart enough to handle @adds(1) for 'def fn(self, (a, b))' if before: - fn_args = list(util.flatten_iterator(inspect.getargspec(method)[0])) - if type(argument) is int: + fn_args = list(util.flatten_iterator(inspect_getargspec(method)[0])) + if isinstance(argument, int): pos_arg = argument named_arg = len(fn_args) > argument and fn_args[argument] or None else: @@ -1144,8 +1174,8 @@ def extend(self, iterable): def __iadd__(fn): def __iadd__(self, iterable): - # list.__iadd__ takes any iterable and seems to let TypeError raise - # as-is instead of returning NotImplemented + # list.__iadd__ takes any iterable and seems to let TypeError + # raise as-is instead of returning NotImplemented for value in iterable: 
self.append(value) return self @@ -1250,7 +1280,7 @@ def update(self, __other=Unspecified, **kw): if hasattr(__other, 'keys'): for key in list(__other): if (key not in self or - self[key] is not __other[key]): + self[key] is not __other[key]): self[key] = __other[key] else: for key, value in __other: @@ -1446,23 +1476,23 @@ class InstrumentedDict(dict): list: InstrumentedList, set: InstrumentedSet, dict: InstrumentedDict, - } +} __interfaces = { list: ( {'appender': 'append', 'remover': 'remove', - 'iterator': '__iter__'}, _list_decorators() - ), + 'iterator': '__iter__'}, _list_decorators() + ), set: ({'appender': 'add', - 'remover': 'remove', - 'iterator': '__iter__'}, _set_decorators() - ), + 'remover': 'remove', + 'iterator': '__iter__'}, _set_decorators() + ), # decorators are required for dicts and object collections. dict: ({'iterator': 'values'}, _dict_decorators()) if util.py3k - else ({'iterator': 'itervalues'}, _dict_decorators()), - } + else ({'iterator': 'itervalues'}, _dict_decorators()), +} class MappedCollection(dict): @@ -1478,8 +1508,8 @@ class MappedCollection(dict): def __init__(self, keyfunc): """Create a new collection with keying provided by keyfunc. - keyfunc may be any callable any callable that takes an object and - returns an object for use as a dictionary key. + keyfunc may be any callable that takes an object and returns an object + for use as a dictionary key. The keyfunc will be called every time the ORM needs to add a member by value-only (such as when loading instances from the database) or @@ -1537,7 +1567,7 @@ def _convert(self, dictlike): "Found incompatible key %r for value %r; this " "collection's " "keying function requires a key of %r for this value." % ( - incoming_key, value, new_key)) + incoming_key, value, new_key)) yield value # ensure instrumentation is associated with diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py index 34a2af391c..a3e5b12f90 100644 --- a/lib/sqlalchemy/orm/dependency.py +++ b/lib/sqlalchemy/orm/dependency.py @@ -1,5 +1,6 @@ # orm/dependency.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -10,7 +11,7 @@ from .. import sql, util, exc as sa_exc from . import attributes, exc, sync, unitofwork, \ - util as mapperutil + util as mapperutil from .interfaces import ONETOMANY, MANYTOONE, MANYTOMANY @@ -38,10 +39,10 @@ def __init__(self, prop): self.key = prop.key if not self.prop.synchronize_pairs: raise sa_exc.ArgumentError( - "Can't build a DependencyProcessor for relationship %s. " - "No target attributes to populate between parent and " - "child are present" % - self.prop) + "Can't build a DependencyProcessor for relationship %s. 
" + "No target attributes to populate between parent and " + "child are present" % + self.prop) @classmethod def from_relationship(cls, prop): @@ -69,31 +70,31 @@ def per_property_flush_actions(self, uow): before_delete = unitofwork.ProcessAll(uow, self, True, True) parent_saves = unitofwork.SaveUpdateAll( - uow, - self.parent.primary_base_mapper - ) + uow, + self.parent.primary_base_mapper + ) child_saves = unitofwork.SaveUpdateAll( - uow, - self.mapper.primary_base_mapper - ) + uow, + self.mapper.primary_base_mapper + ) parent_deletes = unitofwork.DeleteAll( - uow, - self.parent.primary_base_mapper - ) + uow, + self.parent.primary_base_mapper + ) child_deletes = unitofwork.DeleteAll( - uow, - self.mapper.primary_base_mapper - ) + uow, + self.mapper.primary_base_mapper + ) self.per_property_dependencies(uow, - parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, - before_delete - ) + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete + ) def per_state_flush_actions(self, uow, states, isdelete): """establish actions and dependencies related to a flush. @@ -140,45 +141,49 @@ def per_state_flush_actions(self, uow, states, isdelete): # check if the "parent" side is part of the cycle if not isdelete: parent_saves = unitofwork.SaveUpdateAll( - uow, - self.parent.base_mapper) + uow, + self.parent.base_mapper) parent_deletes = before_delete = None if parent_saves in uow.cycles: parent_in_cycles = True else: parent_deletes = unitofwork.DeleteAll( - uow, - self.parent.base_mapper) + uow, + self.parent.base_mapper) parent_saves = after_save = None if parent_deletes in uow.cycles: parent_in_cycles = True # now create actions /dependencies for each state. + for state in states: # detect if there's anything changed or loaded - # by a preprocessor on this state/attribute. if not, - # we should be able to skip it entirely. + # by a preprocessor on this state/attribute. In the + # case of deletes we may try to load missing items here as well. sum_ = state.manager[self.key].impl.get_all_pending( - state, state.dict) + state, state.dict, + self._passive_delete_flag + if isdelete + else attributes.PASSIVE_NO_INITIALIZE) if not sum_: continue if isdelete: before_delete = unitofwork.ProcessState(uow, - self, True, state) + self, True, state) if parent_in_cycles: parent_deletes = unitofwork.DeleteState( - uow, - state, - parent_base_mapper) + uow, + state, + parent_base_mapper) else: after_save = unitofwork.ProcessState(uow, self, False, state) if parent_in_cycles: parent_saves = unitofwork.SaveUpdateState( - uow, - state, - parent_base_mapper) + uow, + state, + parent_base_mapper) if child_in_cycles: child_actions = [] @@ -189,26 +194,26 @@ def per_state_flush_actions(self, uow, states, isdelete): (deleted, listonly) = uow.states[child_state] if deleted: child_action = ( - unitofwork.DeleteState( - uow, child_state, - child_base_mapper), - True) + unitofwork.DeleteState( + uow, child_state, + child_base_mapper), + True) else: child_action = ( - unitofwork.SaveUpdateState( - uow, child_state, - child_base_mapper), - False) + unitofwork.SaveUpdateState( + uow, child_state, + child_base_mapper), + False) child_actions.append(child_action) # establish dependencies between our possibly per-state # parent action and our possibly per-state child action. 
for child_action, childisdelete in child_actions: self.per_state_dependencies(uow, parent_saves, - parent_deletes, - child_action, - after_save, before_delete, - isdelete, childisdelete) + parent_deletes, + child_action, + after_save, before_delete, + isdelete, childisdelete) def presort_deletes(self, uowcommit, states): return False @@ -235,9 +240,9 @@ def prop_has_changes(self, uowcommit, states, isdelete): # to InstanceState which returns: attribute # has a non-None value, or had one history = uowcommit.get_attribute_history( - s, - self.key, - passive) + s, + self.key, + passive) if history and not history.empty(): return True else: @@ -248,27 +253,27 @@ def prop_has_changes(self, uowcommit, states, isdelete): def _verify_canload(self, state): if self.prop.uselist and state is None: raise exc.FlushError( - "Can't flush None value found in " - "collection %s" % (self.prop, )) + "Can't flush None value found in " + "collection %s" % (self.prop, )) elif state is not None and \ - not self.mapper._canload(state, - allow_subtypes=not self.enable_typechecks): + not self.mapper._canload( + state, allow_subtypes=not self.enable_typechecks): if self.mapper._canload(state, allow_subtypes=True): raise exc.FlushError('Attempting to flush an item of type ' - '%(x)s as a member of collection ' - '"%(y)s". Expected an object of type ' - '%(z)s or a polymorphic subclass of ' - 'this type. If %(x)s is a subclass of ' - '%(z)s, configure mapper "%(zm)s" to ' - 'load this subtype polymorphically, or ' - 'set enable_typechecks=False to allow ' - 'any subtype to be accepted for flush. ' - % { - 'x': state.class_, - 'y': self.prop, - 'z': self.mapper.class_, - 'zm': self.mapper, - }) + '%(x)s as a member of collection ' + '"%(y)s". Expected an object of type ' + '%(z)s or a polymorphic subclass of ' + 'this type. If %(x)s is a subclass of ' + '%(z)s, configure mapper "%(zm)s" to ' + 'load this subtype polymorphically, or ' + 'set enable_typechecks=False to allow ' + 'any subtype to be accepted for flush. 
' + % { + 'x': state.class_, + 'y': self.prop, + 'z': self.mapper.class_, + 'zm': self.mapper, + }) else: raise exc.FlushError( 'Attempting to flush an item of type ' @@ -282,7 +287,7 @@ def _verify_canload(self, state): }) def _synchronize(self, state, child, associationrow, - clearkeys, uowcommit): + clearkeys, uowcommit): raise NotImplementedError() def _get_reversed_processed_set(self, uow): @@ -290,20 +295,20 @@ def _get_reversed_processed_set(self, uow): return None process_key = tuple(sorted( - [self.key] + - [p.key for p in self.prop._reverse_property] - )) + [self.key] + + [p.key for p in self.prop._reverse_property] + )) return uow.memo( - ('reverse_key', process_key), - set - ) + ('reverse_key', process_key), + set + ) - def _post_update(self, state, uowcommit, related): + def _post_update(self, state, uowcommit, related, is_m2o_delete=False): for x in related: - if x is not None: + if not is_m2o_delete or x is not None: uowcommit.issue_post_update( - state, - [r for l, r in self.prop.synchronize_pairs] + state, + [r for l, r in self.prop.synchronize_pairs] ) break @@ -317,21 +322,21 @@ def __repr__(self): class OneToManyDP(DependencyProcessor): def per_property_dependencies(self, uow, parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, - before_delete, - ): + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete, + ): if self.post_update: child_post_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, - False) + uow, + self.mapper.primary_base_mapper, + False) child_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, - True) + uow, + self.mapper.primary_base_mapper, + True) uow.dependencies.update([ (child_saves, after_save), @@ -357,22 +362,22 @@ def per_property_dependencies(self, uow, parent_saves, ]) def per_state_dependencies(self, uow, - save_parent, - delete_parent, - child_action, - after_save, before_delete, - isdelete, childisdelete): + save_parent, + delete_parent, + child_action, + after_save, before_delete, + isdelete, childisdelete): if self.post_update: child_post_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, - False) + uow, + self.mapper.primary_base_mapper, + False) child_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.mapper.primary_base_mapper, - True) + uow, + self.mapper.primary_base_mapper, + True) # TODO: this whole block is not covered # by any tests @@ -416,13 +421,13 @@ def presort_deletes(self, uowcommit, states): # child objects the child objects have to have their # foreign key to the parent set to NULL should_null_fks = not self.cascade.delete and \ - not self.passive_deletes == 'all' + not self.passive_deletes == 'all' for state in states: history = uowcommit.get_attribute_history( - state, - self.key, - self._passive_delete_flag) + state, + self.key, + self._passive_delete_flag) if history: for child in history.deleted: if child is not None and self.hasparent(child) is False: @@ -434,8 +439,8 @@ def presort_deletes(self, uowcommit, states): if should_null_fks: for child in history.unchanged: if child is not None: - uowcommit.register_object(child, - operation="delete", prop=self.prop) + uowcommit.register_object( + child, operation="delete", prop=self.prop) def presort_saves(self, uowcommit, states): children_added = uowcommit.memo(('children_added', self), set) @@ -449,28 +454,29 @@ def presort_saves(self, uowcommit, states): passive = attributes.PASSIVE_OFF history = 
uowcommit.get_attribute_history( - state, - self.key, - passive) + state, + self.key, + passive) if history: for child in history.added: if child is not None: uowcommit.register_object(child, cancel_delete=True, - operation="add", - prop=self.prop) + operation="add", + prop=self.prop) children_added.update(history.added) for child in history.deleted: if not self.cascade.delete_orphan: uowcommit.register_object(child, isdelete=False, - operation='delete', - prop=self.prop) + operation='delete', + prop=self.prop) elif self.hasparent(child) is False: - uowcommit.register_object(child, isdelete=True, - operation="delete", prop=self.prop) + uowcommit.register_object( + child, isdelete=True, + operation="delete", prop=self.prop) for c, m, st_, dct_ in self.mapper.cascade_iterator( - 'delete', child): + 'delete', child): uowcommit.register_object( st_, isdelete=True) @@ -480,11 +486,11 @@ def presort_saves(self, uowcommit, states): for child in history.unchanged: if child is not None: uowcommit.register_object( - child, - False, - self.passive_updates, - operation="pk change", - prop=self.prop) + child, + False, + self.passive_updates, + operation="pk change", + prop=self.prop) def process_deletes(self, uowcommit, states): # head object is being deleted, and we manage its list of @@ -498,67 +504,67 @@ def process_deletes(self, uowcommit, states): for state in states: history = uowcommit.get_attribute_history( - state, - self.key, - self._passive_delete_flag) + state, + self.key, + self._passive_delete_flag) if history: for child in history.deleted: if child is not None and \ - self.hasparent(child) is False: + self.hasparent(child) is False: self._synchronize( - state, - child, - None, True, - uowcommit, False) + state, + child, + None, True, + uowcommit, False) if self.post_update and child: self._post_update(child, uowcommit, [state]) if self.post_update or not self.cascade.delete: for child in set(history.unchanged).\ - difference(children_added): + difference(children_added): if child is not None: self._synchronize( - state, - child, - None, True, - uowcommit, False) + state, + child, + None, True, + uowcommit, False) if self.post_update and child: self._post_update(child, - uowcommit, - [state]) + uowcommit, + [state]) # technically, we can even remove each child from the # collection here too. but this would be a somewhat # inconsistent behavior since it wouldn't happen - #if the old parent wasn't deleted but child was moved. + # if the old parent wasn't deleted but child was moved. 
def process_saves(self, uowcommit, states): for state in states: history = uowcommit.get_attribute_history( - state, - self.key, - attributes.PASSIVE_NO_INITIALIZE) + state, + self.key, + attributes.PASSIVE_NO_INITIALIZE) if history: for child in history.added: self._synchronize(state, child, None, - False, uowcommit, False) + False, uowcommit, False) if child is not None and self.post_update: self._post_update(child, uowcommit, [state]) for child in history.deleted: if not self.cascade.delete_orphan and \ - not self.hasparent(child): + not self.hasparent(child): self._synchronize(state, child, None, True, - uowcommit, False) + uowcommit, False) if self._pks_changed(uowcommit, state): for child in history.unchanged: self._synchronize(state, child, None, - False, uowcommit, True) + False, uowcommit, True) def _synchronize(self, state, child, - associationrow, clearkeys, uowcommit, - pks_changed): + associationrow, clearkeys, uowcommit, + pks_changed): source = state dest = child self._verify_canload(child) @@ -569,15 +575,15 @@ def _synchronize(self, state, child, sync.clear(dest, self.mapper, self.prop.synchronize_pairs) else: sync.populate(source, self.parent, dest, self.mapper, - self.prop.synchronize_pairs, uowcommit, - self.passive_updates and pks_changed) + self.prop.synchronize_pairs, uowcommit, + self.passive_updates and pks_changed) def _pks_changed(self, uowcommit, state): return sync.source_modified( - uowcommit, - state, - self.parent, - self.prop.synchronize_pairs) + uowcommit, + state, + self.parent, + self.prop.synchronize_pairs) class ManyToOneDP(DependencyProcessor): @@ -586,22 +592,22 @@ def __init__(self, prop): self.mapper._dependency_processors.append(DetectKeySwitch(prop)) def per_property_dependencies(self, uow, - parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, - before_delete): + parent_saves, + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete): if self.post_update: parent_post_updates = unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, - False) + uow, + self.parent.primary_base_mapper, + False) parent_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, - True) + uow, + self.parent.primary_base_mapper, + True) uow.dependencies.update([ (child_saves, after_save), @@ -622,19 +628,19 @@ def per_property_dependencies(self, uow, ]) def per_state_dependencies(self, uow, - save_parent, - delete_parent, - child_action, - after_save, before_delete, - isdelete, childisdelete): + save_parent, + delete_parent, + child_action, + after_save, before_delete, + isdelete, childisdelete): if self.post_update: if not isdelete: parent_post_updates = unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, - False) + uow, + self.parent.primary_base_mapper, + False) if childisdelete: uow.dependencies.update([ (after_save, parent_post_updates), @@ -649,9 +655,9 @@ def per_state_dependencies(self, uow, ]) else: parent_pre_updates = unitofwork.IssuePostUpdate( - uow, - self.parent.primary_base_mapper, - True) + uow, + self.parent.primary_base_mapper, + True) uow.dependencies.update([ (before_delete, parent_pre_updates), @@ -680,9 +686,9 @@ def presort_deletes(self, uowcommit, states): if self.cascade.delete or self.cascade.delete_orphan: for state in states: history = uowcommit.get_attribute_history( - state, - self.key, - self._passive_delete_flag) + state, + self.key, + self._passive_delete_flag) if history: if self.cascade.delete_orphan: todelete = history.sum() @@ 
-691,8 +697,9 @@ def presort_deletes(self, uowcommit, states): for child in todelete: if child is None: continue - uowcommit.register_object(child, isdelete=True, - operation="delete", prop=self.prop) + uowcommit.register_object( + child, isdelete=True, + operation="delete", prop=self.prop) t = self.mapper.cascade_iterator('delete', child) for c, m, st_, dct_ in t: uowcommit.register_object( @@ -703,14 +710,15 @@ def presort_saves(self, uowcommit, states): uowcommit.register_object(state, operation="add", prop=self.prop) if self.cascade.delete_orphan: history = uowcommit.get_attribute_history( - state, - self.key, - self._passive_delete_flag) + state, + self.key, + self._passive_delete_flag) if history: for child in history.deleted: if self.hasparent(child) is False: - uowcommit.register_object(child, isdelete=True, - operation="delete", prop=self.prop) + uowcommit.register_object( + child, isdelete=True, + operation="delete", prop=self.prop) t = self.mapper.cascade_iterator('delete', child) for c, m, st_, dct_ in t: @@ -728,35 +736,37 @@ def process_deletes(self, uowcommit, states): self._synchronize(state, None, None, True, uowcommit) if state and self.post_update: history = uowcommit.get_attribute_history( - state, - self.key, - self._passive_delete_flag) + state, + self.key, + self._passive_delete_flag) if history: - self._post_update(state, uowcommit, history.sum()) + self._post_update( + state, uowcommit, history.sum(), + is_m2o_delete=True) def process_saves(self, uowcommit, states): for state in states: history = uowcommit.get_attribute_history( - state, - self.key, - attributes.PASSIVE_NO_INITIALIZE) + state, + self.key, + attributes.PASSIVE_NO_INITIALIZE) if history: - for child in history.added: - self._synchronize(state, child, None, False, - uowcommit, "add") - + if history.added: + for child in history.added: + self._synchronize(state, child, None, False, + uowcommit, "add") if self.post_update: self._post_update(state, uowcommit, history.sum()) def _synchronize(self, state, child, associationrow, - clearkeys, uowcommit, operation=None): + clearkeys, uowcommit, operation=None): if state is None or \ - (not self.post_update and uowcommit.is_deleted(state)): + (not self.post_update and uowcommit.is_deleted(state)): return if operation is not None and \ - child is not None and \ - not uowcommit.session._contains_state(child): + child is not None and \ + not uowcommit.session._contains_state(child): util.warn( "Object of type %s not in session, %s " "operation along '%s' won't proceed" % @@ -768,10 +778,10 @@ def _synchronize(self, state, child, associationrow, else: self._verify_canload(child) sync.populate(child, self.mapper, state, - self.parent, - self.prop.synchronize_pairs, - uowcommit, - False) + self.parent, + self.prop.synchronize_pairs, + uowcommit, + False) class DetectKeySwitch(DependencyProcessor): @@ -791,16 +801,16 @@ def per_property_preprocessors(self, uow): if self.passive_updates: return else: - if False in (prop.passive_updates for \ - prop in self.prop._reverse_property): + if False in (prop.passive_updates for + prop in self.prop._reverse_property): return uow.register_preprocessor(self, False) def per_property_flush_actions(self, uow): parent_saves = unitofwork.SaveUpdateAll( - uow, - self.parent.base_mapper) + uow, + self.parent.base_mapper) after_save = unitofwork.ProcessAll(uow, self, False, False) uow.dependencies.update([ (parent_saves, after_save) @@ -839,8 +849,8 @@ def process_saves(self, uowcommit, states): def _key_switchers(self, uow, states): 
switched, notswitched = uow.memo( ('pk_switchers', self), - lambda: (set(), set()) - ) + lambda: (set(), set()) + ) allstates = switched.union(notswitched) for s in states: @@ -860,37 +870,35 @@ def _process_key_switches(self, deplist, uowcommit): if not issubclass(state.class_, self.parent.class_): continue dict_ = state.dict - related = state.get_impl(self.key).get(state, dict_, - passive=self._passive_update_flag) + related = state.get_impl(self.key).get( + state, dict_, passive=self._passive_update_flag) if related is not attributes.PASSIVE_NO_RESULT and \ related is not None: related_state = attributes.instance_state(dict_[self.key]) if related_state in switchers: uowcommit.register_object(state, - False, - self.passive_updates) + False, + self.passive_updates) sync.populate( - related_state, - self.mapper, state, - self.parent, self.prop.synchronize_pairs, - uowcommit, self.passive_updates) + related_state, + self.mapper, state, + self.parent, self.prop.synchronize_pairs, + uowcommit, self.passive_updates) def _pks_changed(self, uowcommit, state): - return bool(state.key) and sync.source_modified(uowcommit, - state, - self.mapper, - self.prop.synchronize_pairs) + return bool(state.key) and sync.source_modified( + uowcommit, state, self.mapper, self.prop.synchronize_pairs) class ManyToManyDP(DependencyProcessor): def per_property_dependencies(self, uow, parent_saves, - child_saves, - parent_deletes, - child_deletes, - after_save, - before_delete - ): + child_saves, + parent_deletes, + child_deletes, + after_save, + before_delete + ): uow.dependencies.update([ (parent_saves, after_save), @@ -910,11 +918,11 @@ def per_property_dependencies(self, uow, parent_saves, ]) def per_state_dependencies(self, uow, - save_parent, - delete_parent, - child_action, - after_save, before_delete, - isdelete, childisdelete): + save_parent, + delete_parent, + child_action, + after_save, before_delete, + isdelete, childisdelete): if not isdelete: if childisdelete: uow.dependencies.update([ @@ -941,9 +949,9 @@ def presort_deletes(self, uowcommit, states): # returns True for state in states: uowcommit.get_attribute_history( - state, - self.key, - self._passive_delete_flag) + state, + self.key, + self._passive_delete_flag) def presort_saves(self, uowcommit, states): if not self.passive_updates: @@ -953,9 +961,9 @@ def presort_saves(self, uowcommit, states): for state in states: if self._pks_changed(uowcommit, state): history = uowcommit.get_attribute_history( - state, - self.key, - attributes.PASSIVE_OFF) + state, + self.key, + attributes.PASSIVE_OFF) if not self.cascade.delete_orphan: return @@ -964,17 +972,18 @@ def presort_saves(self, uowcommit, states): # if delete_orphan check is turned on. 
for state in states: history = uowcommit.get_attribute_history( - state, - self.key, - attributes.PASSIVE_NO_INITIALIZE) + state, + self.key, + attributes.PASSIVE_NO_INITIALIZE) if history: for child in history.deleted: if self.hasparent(child) is False: - uowcommit.register_object(child, isdelete=True, - operation="delete", prop=self.prop) + uowcommit.register_object( + child, isdelete=True, + operation="delete", prop=self.prop) for c, m, st_, dct_ in self.mapper.cascade_iterator( - 'delete', - child): + 'delete', + child): uowcommit.register_object( st_, isdelete=True) @@ -989,9 +998,9 @@ def process_deletes(self, uowcommit, states): # this history should be cached already, as # we loaded it in preprocess_deletes history = uowcommit.get_attribute_history( - state, - self.key, - self._passive_delete_flag) + state, + self.key, + self._passive_delete_flag) if history: for child in history.non_added(): if child is None or \ @@ -1000,10 +1009,10 @@ def process_deletes(self, uowcommit, states): continue associationrow = {} if not self._synchronize( - state, - child, - associationrow, - False, uowcommit, "delete"): + state, + child, + associationrow, + False, uowcommit, "delete"): continue secondary_delete.append(associationrow) @@ -1013,7 +1022,7 @@ def process_deletes(self, uowcommit, states): processed.update(tmp) self._run_crud(uowcommit, secondary_insert, - secondary_update, secondary_delete) + secondary_update, secondary_delete) def process_saves(self, uowcommit, states): secondary_delete = [] @@ -1025,23 +1034,23 @@ def process_saves(self, uowcommit, states): for state in states: need_cascade_pks = not self.passive_updates and \ - self._pks_changed(uowcommit, state) + self._pks_changed(uowcommit, state) if need_cascade_pks: passive = attributes.PASSIVE_OFF else: passive = attributes.PASSIVE_NO_INITIALIZE history = uowcommit.get_attribute_history(state, self.key, - passive) + passive) if history: for child in history.added: if (processed is not None and - (state, child) in processed): + (state, child) in processed): continue associationrow = {} if not self._synchronize(state, - child, - associationrow, - False, uowcommit, "add"): + child, + associationrow, + False, uowcommit, "add"): continue secondary_insert.append(associationrow) for child in history.deleted: @@ -1050,14 +1059,14 @@ def process_saves(self, uowcommit, states): continue associationrow = {} if not self._synchronize(state, - child, - associationrow, - False, uowcommit, "delete"): + child, + associationrow, + False, uowcommit, "delete"): continue secondary_delete.append(associationrow) tmp.update((c, state) - for c in history.added + history.deleted) + for c in history.added + history.deleted) if need_cascade_pks: @@ -1080,45 +1089,46 @@ def process_saves(self, uowcommit, states): processed.update(tmp) self._run_crud(uowcommit, secondary_insert, - secondary_update, secondary_delete) + secondary_update, secondary_delete) def _run_crud(self, uowcommit, secondary_insert, - secondary_update, secondary_delete): + secondary_update, secondary_delete): connection = uowcommit.transaction.connection(self.mapper) if secondary_delete: associationrow = secondary_delete[0] statement = self.secondary.delete(sql.and_(*[ - c == sql.bindparam(c.key, type_=c.type) - for c in self.secondary.c - if c.key in associationrow - ])) + c == sql.bindparam(c.key, type_=c.type) + for c in self.secondary.c + if c.key in associationrow + ])) result = connection.execute(statement, secondary_delete) if result.supports_sane_multi_rowcount() and \ - 
result.rowcount != len(secondary_delete): + result.rowcount != len(secondary_delete): raise exc.StaleDataError( "DELETE statement on table '%s' expected to delete " "%d row(s); Only %d were matched." % (self.secondary.description, len(secondary_delete), - result.rowcount) + result.rowcount) ) if secondary_update: associationrow = secondary_update[0] statement = self.secondary.update(sql.and_(*[ - c == sql.bindparam("old_" + c.key, type_=c.type) - for c in self.secondary.c - if c.key in associationrow - ])) + c == sql.bindparam("old_" + c.key, type_=c.type) + for c in self.secondary.c + if c.key in associationrow + ])) result = connection.execute(statement, secondary_update) + if result.supports_sane_multi_rowcount() and \ - result.rowcount != len(secondary_update): + result.rowcount != len(secondary_update): raise exc.StaleDataError( "UPDATE statement on table '%s' expected to update " "%d row(s); Only %d were matched." % (self.secondary.description, len(secondary_update), - result.rowcount) + result.rowcount) ) if secondary_insert: @@ -1126,7 +1136,7 @@ def _run_crud(self, uowcommit, secondary_insert, connection.execute(statement, secondary_insert) def _synchronize(self, state, child, associationrow, - clearkeys, uowcommit, operation): + clearkeys, uowcommit, operation): # this checks for None if uselist=True self._verify_canload(child) @@ -1145,18 +1155,18 @@ def _synchronize(self, state, child, associationrow, return False sync.populate_dict(state, self.parent, associationrow, - self.prop.synchronize_pairs) + self.prop.synchronize_pairs) sync.populate_dict(child, self.mapper, associationrow, - self.prop.secondary_synchronize_pairs) + self.prop.secondary_synchronize_pairs) return True def _pks_changed(self, uowcommit, state): return sync.source_modified( - uowcommit, - state, - self.parent, - self.prop.synchronize_pairs) + uowcommit, + state, + self.parent, + self.prop.synchronize_pairs) _direction_to_processor = { ONETOMANY: OneToManyDP, diff --git a/lib/sqlalchemy/orm/deprecated_interfaces.py b/lib/sqlalchemy/orm/deprecated_interfaces.py index 020b7c7186..6477e8291f 100644 --- a/lib/sqlalchemy/orm/deprecated_interfaces.py +++ b/lib/sqlalchemy/orm/deprecated_interfaces.py @@ -1,5 +1,6 @@ # orm/deprecated_interfaces.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -7,6 +8,7 @@ from .. import event, util from .interfaces import EXT_CONTINUE + @util.langhelpers.dependency_for("sqlalchemy.orm.interfaces") class MapperExtension(object): """Base implementation for :class:`.Mapper` event hooks. 
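For reference, the secondary-table rows that ManyToManyDP._run_crud inserts and deletes above come from an ordinary many-to-many mapping. A minimal sketch of such a mapping follows (not part of this patch; Parent, Child and the association table are hypothetical names):

from sqlalchemy import Table, Column, Integer, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

# plain association table used as relationship(secondary=...)
association = Table(
    'association', Base.metadata,
    Column('parent_id', Integer, ForeignKey('parent.id'), primary_key=True),
    Column('child_id', Integer, ForeignKey('child.id'), primary_key=True),
)

class Parent(Base):
    __tablename__ = 'parent'
    id = Column(Integer, primary_key=True)
    # appends to / removes from this collection are what become the
    # secondary_insert / secondary_delete rows flushed by _run_crud
    children = relationship('Child', secondary=association, backref='parents')

class Child(Base):
    __tablename__ = 'child'
    id = Column(Integer, primary_key=True)

A flush that matches fewer association rows than expected raises the StaleDataError seen in the hunk above.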
@@ -63,20 +65,16 @@ def _adapt_listener(cls, self, listener): cls._adapt_listener_methods( self, listener, ( - 'init_instance', - 'init_failed', - 'translate_row', - 'create_instance', - 'append_result', - 'populate_instance', - 'reconstruct_instance', - 'before_insert', - 'after_insert', - 'before_update', - 'after_update', - 'before_delete', - 'after_delete' - )) + 'init_instance', + 'init_failed', + 'reconstruct_instance', + 'before_insert', + 'after_insert', + 'before_update', + 'after_update', + 'before_delete', + 'after_delete' + )) @classmethod def _adapt_listener_methods(cls, self, listener, methods): @@ -92,29 +90,30 @@ def reconstruct(instance, ctx): ls_meth(self, instance) return reconstruct event.listen(self.class_manager, 'load', - go(ls_meth), raw=False, propagate=True) + go(ls_meth), raw=False, propagate=True) elif meth == 'init_instance': def go(ls_meth): def init_instance(instance, args, kwargs): ls_meth(self, self.class_, - self.class_manager.original_init, - instance, args, kwargs) + self.class_manager.original_init, + instance, args, kwargs) return init_instance event.listen(self.class_manager, 'init', - go(ls_meth), raw=False, propagate=True) + go(ls_meth), raw=False, propagate=True) elif meth == 'init_failed': def go(ls_meth): def init_failed(instance, args, kwargs): - util.warn_exception(ls_meth, self, self.class_, - self.class_manager.original_init, - instance, args, kwargs) + util.warn_exception( + ls_meth, self, self.class_, + self.class_manager.original_init, + instance, args, kwargs) return init_failed event.listen(self.class_manager, 'init_failure', - go(ls_meth), raw=False, propagate=True) + go(ls_meth), raw=False, propagate=True) else: event.listen(self, "%s" % meth, ls_meth, - raw=False, retval=True, propagate=True) + raw=False, retval=True, propagate=True) def instrument_class(self, mapper, class_): """Receive a class when the mapper is first constructed, and has @@ -127,7 +126,7 @@ def instrument_class(self, mapper, class_): return EXT_CONTINUE def init_instance(self, mapper, class_, oldinit, instance, args, kwargs): - """Receive an instance when it's constructor is called. + """Receive an instance when its constructor is called. This method is only called during a userland construction of an object. It is not called when an object is loaded from the @@ -140,7 +139,7 @@ def init_instance(self, mapper, class_, oldinit, instance, args, kwargs): return EXT_CONTINUE def init_failed(self, mapper, class_, oldinit, instance, args, kwargs): - """Receive an instance when it's constructor has been called, + """Receive an instance when its constructor has been called, and raised an exception. This method is only called during a userland construction of @@ -153,108 +152,6 @@ def init_failed(self, mapper, class_, oldinit, instance, args, kwargs): """ return EXT_CONTINUE - def translate_row(self, mapper, context, row): - """Perform pre-processing on the given result row and return a - new row instance. - - This is called when the mapper first receives a row, before - the object identity or the instance itself has been derived - from that row. The given row may or may not be a - ``RowProxy`` object - it will always be a dictionary-like - object which contains mapped columns as keys. The - returned object should also be a dictionary-like object - which recognizes mapped columns as keys. - - If the ultimate return value is EXT_CONTINUE, the row - is not translated. 
- - """ - return EXT_CONTINUE - - def create_instance(self, mapper, selectcontext, row, class_): - """Receive a row when a new object instance is about to be - created from that row. - - The method can choose to create the instance itself, or it can return - EXT_CONTINUE to indicate normal object creation should take place. - - mapper - The mapper doing the operation - - selectcontext - The QueryContext generated from the Query. - - row - The result row from the database - - class\_ - The class we are mapping. - - return value - A new object instance, or EXT_CONTINUE - - """ - return EXT_CONTINUE - - def append_result(self, mapper, selectcontext, row, instance, - result, **flags): - """Receive an object instance before that instance is appended - to a result list. - - If this method returns EXT_CONTINUE, result appending will proceed - normally. if this method returns any other value or None, - result appending will not proceed for this instance, giving - this extension an opportunity to do the appending itself, if - desired. - - mapper - The mapper doing the operation. - - selectcontext - The QueryContext generated from the Query. - - row - The result row from the database. - - instance - The object instance to be appended to the result. - - result - List to which results are being appended. - - \**flags - extra information about the row, same as criterion in - ``create_row_processor()`` method of - :class:`~sqlalchemy.orm.interfaces.MapperProperty` - """ - - return EXT_CONTINUE - - def populate_instance(self, mapper, selectcontext, row, - instance, **flags): - """Receive an instance before that instance has - its attributes populated. - - This usually corresponds to a newly loaded instance but may - also correspond to an already-loaded instance which has - unloaded attributes to be populated. The method may be called - many times for a single instance, as multiple result rows are - used to populate eagerly loaded collections. - - If this method returns EXT_CONTINUE, instance population will - proceed normally. If any other value or None is returned, - instance population will not proceed, giving this extension an - opportunity to populate the instance itself, if desired. - - .. deprecated:: 0.5 - Most usages of this hook are obsolete. For a - generic "object has been newly created from a row" hook, use - ``reconstruct_instance()``, or the ``@orm.reconstructor`` - decorator. - - """ - return EXT_CONTINUE - def reconstruct_instance(self, mapper, instance): """Receive an object instance after it has been created via ``__new__``, and after initial attribute population has @@ -554,14 +451,14 @@ def set(self, state, value, oldvalue, initiator): @classmethod def _adapt_listener(cls, self, listener): event.listen(self, 'append', listener.append, - active_history=listener.active_history, - raw=True, retval=True) + active_history=listener.active_history, + raw=True, retval=True) event.listen(self, 'remove', listener.remove, - active_history=listener.active_history, - raw=True, retval=True) + active_history=listener.active_history, + raw=True, retval=True) event.listen(self, 'set', listener.set, - active_history=listener.active_history, - raw=True, retval=True) + active_history=listener.active_history, + raw=True, retval=True) def append(self, state, value, initiator): """Receive a collection append event. 
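The MapperExtension hooks deleted above (translate_row, create_instance, append_result, populate_instance) were already deprecated and are dropped outright; the surviving hooks remain adapted onto the event system by _adapt_listener. For reference, roughly equivalent listener-based usage looks like the sketch below (not part of this patch; the User model is hypothetical):

from sqlalchemy import Column, Integer, String, event
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

@event.listens_for(User, 'load')
def on_load(target, context):
    # counterpart of MapperExtension.reconstruct_instance(): called after an
    # instance is created from a row and its attributes are populated
    target.loaded_from_db = True

@event.listens_for(User, 'before_insert')
def on_before_insert(mapper, connection, target):
    # counterpart of MapperExtension.before_insert()
    if target.name is not None:
        target.name = target.name.strip()

@event.listens_for(User.name, 'set', retval=True)
def on_set_name(target, value, oldvalue, initiator):
    # counterpart of AttributeExtension.set(); with retval=True the listener
    # must return the (possibly modified) value
    return value

These are the same 'load', 'before_insert' and 'set' hooks that the _adapt_listener methods register on behalf of the legacy extension classes.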
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py index 9ecc9bb626..6c87ef9ba9 100644 --- a/lib/sqlalchemy/orm/descriptor_props.py +++ b/lib/sqlalchemy/orm/descriptor_props.py @@ -1,5 +1,6 @@ # orm/descriptor_props.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -38,7 +39,7 @@ def __init__(self, key): if hasattr(prop, 'get_history'): def get_history(self, state, dict_, - passive=attributes.PASSIVE_OFF): + passive=attributes.PASSIVE_OFF): return prop.get_history(state, dict_, passive) if self.descriptor is None: @@ -62,16 +63,15 @@ def fget(obj): fdel=fdel, ) - proxy_attr = attributes.\ - create_proxied_attribute(self.descriptor)\ - ( - self.parent.class_, - self.key, - self.descriptor, - lambda: self._comparator_factory(mapper), - doc=self.doc, - original_property=self - ) + proxy_attr = attributes.create_proxied_attribute( + self.descriptor)( + self.parent.class_, + self.key, + self.descriptor, + lambda: self._comparator_factory(mapper), + doc=self.doc, + original_property=self + ) proxy_attr.impl = _ProxyImpl(self.key) mapper.class_manager.instrument_attribute(self.key, proxy_attr) @@ -89,11 +89,12 @@ class CompositeProperty(DescriptorProperty): :ref:`mapper_composite` """ + def __init__(self, class_, *attrs, **kwargs): """Return a composite column-based property for use with a Mapper. - See the mapping documentation section :ref:`mapper_composite` for a full - usage example. + See the mapping documentation section :ref:`mapper_composite` for a + full usage example. The :class:`.MapperProperty` returned by :func:`.composite` is the :class:`.CompositeProperty`. @@ -117,13 +118,14 @@ def __init__(self, class_, *attrs, **kwargs): A group name for this property when marked as deferred. :param deferred: - When True, the column property is "deferred", meaning that it does not - load immediately, and is instead loaded when the attribute is first - accessed on an instance. See also :func:`~sqlalchemy.orm.deferred`. + When True, the column property is "deferred", meaning that it does + not load immediately, and is instead loaded when the attribute is + first accessed on an instance. See also + :func:`~sqlalchemy.orm.deferred`. :param comparator_factory: a class which extends - :class:`.CompositeProperty.Comparator` which provides custom SQL clause - generation for comparison operations. + :class:`.CompositeProperty.Comparator` which provides custom SQL + clause generation for comparison operations. :param doc: optional string that will be applied as the doc on the @@ -137,10 +139,11 @@ def __init__(self, class_, *attrs, **kwargs): :param extension: an :class:`.AttributeExtension` instance, or list of extensions, which will be prepended to the list of - attribute listeners for the resulting descriptor placed on the class. - **Deprecated.** Please see :class:`.AttributeEvents`. + attribute listeners for the resulting descriptor placed on the + class. **Deprecated.** Please see :class:`.AttributeEvents`. 
""" + super(CompositeProperty, self).__init__() self.attrs = attrs self.composite_class = class_ @@ -148,14 +151,13 @@ def __init__(self, class_, *attrs, **kwargs): self.deferred = kwargs.get('deferred', False) self.group = kwargs.get('group', None) self.comparator_factory = kwargs.pop('comparator_factory', - self.__class__.Comparator) + self.__class__.Comparator) if 'info' in kwargs: self.info = kwargs.pop('info') util.set_creation_order(self) self._create_descriptor() - def instrument_class(self, mapper): super(CompositeProperty, self).instrument_class(mapper) self._setup_event_handlers() @@ -240,16 +242,17 @@ def props(self): props = [] for attr in self.attrs: if isinstance(attr, str): - prop = self.parent.get_property(attr, _configure_mappers=False) + prop = self.parent.get_property( + attr, _configure_mappers=False) elif isinstance(attr, schema.Column): prop = self.parent._columntoproperty[attr] elif isinstance(attr, attributes.InstrumentedAttribute): prop = attr.property else: raise sa_exc.ArgumentError( - "Composite expects Column objects or mapped " - "attributes/attribute names as arguments, got: %r" - % (attr,)) + "Composite expects Column objects or mapped " + "attributes/attribute names as arguments, got: %r" + % (attr,)) props.append(prop) return props @@ -267,8 +270,8 @@ def _setup_arguments_on_columns(self): if self.deferred: prop.deferred = self.deferred prop.strategy_class = prop._strategy_lookup( - ("deferred", True), - ("instrument", True)) + ("deferred", True), + ("instrument", True)) prop.group = self.group def _setup_event_handlers(self): @@ -287,11 +290,11 @@ def load_handler(state, *args): if k not in dict_: return - #assert self.key not in dict_ + # assert self.key not in dict_ dict_[self.key] = self.composite_class( - *[state.dict[key] for key in - self._attribute_keys] - ) + *[state.dict[key] for key in + self._attribute_keys] + ) def expire_handler(state, keys): if keys is None or set(self._attribute_keys).intersection(keys): @@ -308,15 +311,15 @@ def insert_update_handler(mapper, connection, state): state.dict.pop(self.key, None) event.listen(self.parent, 'after_insert', - insert_update_handler, raw=True) + insert_update_handler, raw=True) event.listen(self.parent, 'after_update', - insert_update_handler, raw=True) + insert_update_handler, raw=True) event.listen(self.parent, 'load', - load_handler, raw=True, propagate=True) + load_handler, raw=True, propagate=True) event.listen(self.parent, 'refresh', - load_handler, raw=True, propagate=True) + load_handler, raw=True, propagate=True) event.listen(self.parent, 'expire', - expire_handler, raw=True, propagate=True) + expire_handler, raw=True, propagate=True) # TODO: need a deserialize hook here @@ -367,14 +370,14 @@ class CompositeBundle(query.Bundle): def __init__(self, property, expr): self.property = property super(CompositeProperty.CompositeBundle, self).__init__( - property.key, *expr) + property.key, *expr) def create_row_processor(self, query, procs, labels): - def proc(row, result): - return self.property.composite_class(*[proc(row, result) for proc in procs]) + def proc(row): + return self.property.composite_class( + *[proc(row) for proc in procs]) return proc - class Comparator(PropComparator): """Produce boolean, comparison, and other operators for :class:`.CompositeProperty` attributes. 
@@ -394,7 +397,6 @@ class Comparator(PropComparator): """ - __hash__ = None @property @@ -402,20 +404,22 @@ def clauses(self): return self.__clause_element__() def __clause_element__(self): - return expression.ClauseList(group=False, *self._comparable_elements) + return expression.ClauseList( + group=False, *self._comparable_elements) def _query_clause_element(self): - return CompositeProperty.CompositeBundle(self.prop, self.__clause_element__()) + return CompositeProperty.CompositeBundle( + self.prop, self.__clause_element__()) @util.memoized_property def _comparable_elements(self): if self._adapt_to_entity: return [ - getattr( - self._adapt_to_entity.entity, - prop.key - ) for prop in self.prop._comparable_elements - ] + getattr( + self._adapt_to_entity.entity, + prop.key + ) for prop in self.prop._comparable_elements + ] else: return self.prop._comparable_elements @@ -468,11 +472,12 @@ def _comparator_factory(self, mapper): return comparator_callable def __init__(self): + super(ConcreteInheritedProperty, self).__init__() def warn(): raise AttributeError("Concrete %s does not implement " - "attribute %r at the instance level. Add this " - "property explicitly to %s." % - (self.parent, self.key, self.parent)) + "attribute %r at the instance level. Add " + "this property explicitly to %s." % + (self.parent, self.key, self.parent)) class NoninheritedConcreteProp(object): def __set__(s, obj, value): @@ -492,8 +497,8 @@ def __get__(s, obj, owner): class SynonymProperty(DescriptorProperty): def __init__(self, name, map_column=None, - descriptor=None, comparator_factory=None, - doc=None): + descriptor=None, comparator_factory=None, + doc=None, info=None): """Denote an attribute name as a synonym to a mapped property, in that the attribute will mirror the value and expression behavior of another attribute. @@ -522,11 +527,16 @@ class MyClass(Base): job_status = synonym("_job_status", map_column=True) The above class ``MyClass`` will now have the ``job_status`` - :class:`.Column` object mapped to the attribute named ``_job_status``, - and the attribute named ``job_status`` will refer to the synonym - itself. This feature is typically used in conjunction with the - ``descriptor`` argument in order to link a user-defined descriptor - as a "wrapper" for an existing column. + :class:`.Column` object mapped to the attribute named + ``_job_status``, and the attribute named ``job_status`` will refer + to the synonym itself. This feature is typically used in + conjunction with the ``descriptor`` argument in order to link a + user-defined descriptor as a "wrapper" for an existing column. + + :param info: Optional data dictionary which will be populated into the + :attr:`.InspectionAttr.info` attribute of this object. + + .. versionadded:: 1.0.0 :param comparator_factory: A subclass of :class:`.PropComparator` that will provide custom comparison behavior at the SQL expression @@ -547,12 +557,15 @@ class MyClass(Base): more complicated attribute-wrapping schemes than synonyms. 
""" + super(SynonymProperty, self).__init__() self.name = name self.map_column = map_column self.descriptor = descriptor self.comparator_factory = comparator_factory self.doc = doc or (descriptor and descriptor.__doc__) or None + if info: + self.info = info util.set_creation_order(self) @@ -579,12 +592,12 @@ def set_parent(self, parent, init): raise sa_exc.ArgumentError( "Can't compile synonym '%s': no column on table " "'%s' named '%s'" - % (self.name, parent.mapped_table.description, self.key)) + % (self.name, parent.mapped_table.description, self.key)) elif parent.mapped_table.c[self.key] in \ parent._columntoproperty and \ parent._columntoproperty[ - parent.mapped_table.c[self.key] - ].key == self.name: + parent.mapped_table.c[self.key] + ].key == self.name: raise sa_exc.ArgumentError( "Can't call map_column=True for synonym %r=%r, " "a ColumnProperty already exists keyed to the name " @@ -593,9 +606,9 @@ def set_parent(self, parent, init): ) p = properties.ColumnProperty(parent.mapped_table.c[self.key]) parent._configure_property( - self.name, p, - init=init, - setparent=True) + self.name, p, + init=init, + setparent=True) p._mapped_by_synonym = self.key self.parent = parent @@ -605,7 +618,8 @@ def set_parent(self, parent, init): class ComparableProperty(DescriptorProperty): """Instruments a Python property for use in query expressions.""" - def __init__(self, comparator_factory, descriptor=None, doc=None): + def __init__( + self, comparator_factory, descriptor=None, doc=None, info=None): """Provides a method of applying a :class:`.PropComparator` to any Python descriptor attribute. @@ -645,7 +659,8 @@ class SearchWord(Base): id = Column(Integer, primary_key=True) word = Column(String) word_insensitive = comparable_property(lambda prop, mapper: - CaseInsensitiveComparator(mapper.c.word, mapper) + CaseInsensitiveComparator( + mapper.c.word, mapper) ) @@ -666,13 +681,19 @@ class SearchWord(Base): The like-named descriptor will be automatically retrieved from the mapped class if left blank in a ``properties`` declaration. + :param info: Optional data dictionary which will be populated into the + :attr:`.InspectionAttr.info` attribute of this object. + + .. versionadded:: 1.0.0 + """ + super(ComparableProperty, self).__init__() self.descriptor = descriptor self.comparator_factory = comparator_factory self.doc = doc or (descriptor and descriptor.__doc__) or None + if info: + self.info = info util.set_creation_order(self) def _comparator_factory(self, mapper): return self.comparator_factory(self, mapper) - - diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py index bae09d32d5..88187cdcf0 100644 --- a/lib/sqlalchemy/orm/dynamic.py +++ b/lib/sqlalchemy/orm/dynamic.py @@ -1,5 +1,6 @@ # orm/dynamic.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -16,9 +17,10 @@ from . import ( attributes, object_session, util as orm_util, strategies, object_mapper, exc as orm_exc, properties - ) +) from .query import Query + @log.class_logger @properties.RelationshipProperty.strategy_for(lazy="dynamic") class DynaLoader(strategies.AbstractRelationshipLoader): @@ -29,7 +31,8 @@ def init_class_attribute(self, mapper): "On relationship %s, 'dynamic' loaders cannot be used with " "many-to-one/one-to-one relationships and/or " "uselist=False." 
% self.parent_property) - strategies._register_attribute(self, + strategies._register_attribute( + self, mapper, useobject=True, uselist=True, @@ -40,6 +43,7 @@ def init_class_attribute(self, mapper): backref=self.parent_property.back_populates, ) + class DynamicAttributeImpl(attributes.AttributeImpl): uses_objects = True accepts_scalar_loader = False @@ -47,10 +51,10 @@ class DynamicAttributeImpl(attributes.AttributeImpl): collection = False def __init__(self, class_, key, typecallable, - dispatch, - target_mapper, order_by, query_class=None, **kw): + dispatch, + target_mapper, order_by, query_class=None, **kw): super(DynamicAttributeImpl, self).\ - __init__(class_, key, typecallable, dispatch, **kw) + __init__(class_, key, typecallable, dispatch, **kw) self.target_mapper = target_mapper self.order_by = order_by if not query_class: @@ -62,16 +66,16 @@ def __init__(self, class_, key, typecallable, def get(self, state, dict_, passive=attributes.PASSIVE_OFF): if not passive & attributes.SQL_OK: - return self._get_collection_history(state, - attributes.PASSIVE_NO_INITIALIZE).added_items + return self._get_collection_history( + state, attributes.PASSIVE_NO_INITIALIZE).added_items else: return self.query_class(self, state) def get_collection(self, state, dict_, user_data=None, - passive=attributes.PASSIVE_NO_INITIALIZE): + passive=attributes.PASSIVE_NO_INITIALIZE): if not passive & attributes.SQL_OK: return self._get_collection_history(state, - passive).added_items + passive).added_items else: history = self._get_collection_history(state, passive) return history.added_plus_unchanged @@ -85,7 +89,7 @@ def _remove_token(self): return attributes.Event(self, attributes.OP_REMOVE) def fire_append_event(self, state, dict_, value, initiator, - collection_history=None): + collection_history=None): if collection_history is None: collection_history = self._modified_event(state, dict_) @@ -98,7 +102,7 @@ def fire_append_event(self, state, dict_, value, initiator, self.sethasparent(attributes.instance_state(value), state, True) def fire_remove_event(self, state, dict_, value, initiator, - collection_history=None): + collection_history=None): if collection_history is None: collection_history = self._modified_event(state, dict_) @@ -116,8 +120,8 @@ def _modified_event(self, state, dict_): state.committed_state[self.key] = CollectionHistory(self, state) state._modified_event(dict_, - self, - attributes.NEVER_SET) + self, + attributes.NEVER_SET) # this is a hack to allow the fixtures.ComparableEntity fixture # to work @@ -125,8 +129,8 @@ def _modified_event(self, state, dict_): return state.committed_state[self.key] def set(self, state, dict_, value, initiator, - passive=attributes.PASSIVE_OFF, - check_old=None, pop=False): + passive=attributes.PASSIVE_OFF, + check_old=None, pop=False): if initiator and initiator.parent_token is self.parent_token: return @@ -144,7 +148,7 @@ def _set_iterable(self, state, dict_, iterable, adapter=None): old_collection = collection_history.added_items else: old_collection = old_collection.union( - collection_history.added_items) + collection_history.added_items) idset = util.IdentitySet constants = old_collection.intersection(new_values) @@ -154,11 +158,11 @@ def _set_iterable(self, state, dict_, iterable, adapter=None): for member in new_values: if member in additions: self.fire_append_event(state, dict_, member, None, - collection_history=collection_history) + collection_history=collection_history) for member in removals: self.fire_remove_event(state, dict_, member, None, - 
collection_history=collection_history) + collection_history=collection_history) def delete(self, *args, **kwargs): raise NotImplementedError() @@ -171,14 +175,15 @@ def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF): c = self._get_collection_history(state, passive) return c.as_history() - def get_all_pending(self, state, dict_): + def get_all_pending(self, state, dict_, + passive=attributes.PASSIVE_NO_INITIALIZE): c = self._get_collection_history( - state, attributes.PASSIVE_NO_INITIALIZE) + state, passive) return [ - (attributes.instance_state(x), x) - for x in - c.all_items - ] + (attributes.instance_state(x), x) + for x in + c.all_items + ] def _get_collection_history(self, state, passive=attributes.PASSIVE_OFF): if self.key in state.committed_state: @@ -192,17 +197,17 @@ def _get_collection_history(self, state, passive=attributes.PASSIVE_OFF): return c def append(self, state, dict_, value, initiator, - passive=attributes.PASSIVE_OFF): + passive=attributes.PASSIVE_OFF): if initiator is not self: self.fire_append_event(state, dict_, value, initiator) def remove(self, state, dict_, value, initiator, - passive=attributes.PASSIVE_OFF): + passive=attributes.PASSIVE_OFF): if initiator is not self: self.fire_remove_event(state, dict_, value, initiator) def pop(self, state, dict_, value, initiator, - passive=attributes.PASSIVE_OFF): + passive=attributes.PASSIVE_OFF): self.remove(state, dict_, value, initiator, passive=passive) @@ -216,11 +221,9 @@ def __init__(self, attr, state): mapper = object_mapper(instance) prop = mapper._props[self.attr.key] - self._criterion = prop.compare( - operators.eq, - instance, - value_is_parent=True, - alias_secondary=False) + self._criterion = prop._with_parent( + instance, + alias_secondary=False) if self.attr.order_by: self._order_by = self.attr.order_by @@ -228,7 +231,7 @@ def __init__(self, attr, state): def session(self): sess = object_session(self.instance) if sess is not None and self.autoflush and sess.autoflush \ - and self.instance in sess: + and self.instance in sess: sess.flush() if not orm_util.has_identity(self.instance): return None @@ -337,7 +340,7 @@ def added_plus_unchanged(self): @property def all_items(self): return list(self.added_items.union( - self.unchanged_items).union(self.deleted_items)) + self.unchanged_items).union(self.deleted_items)) def as_history(self): if self._reconcile_collection: @@ -346,13 +349,13 @@ def as_history(self): unchanged = self.unchanged_items.difference(deleted) else: added, unchanged, deleted = self.added_items,\ - self.unchanged_items,\ - self.deleted_items + self.unchanged_items,\ + self.deleted_items return attributes.History( - list(added), - list(unchanged), - list(deleted), - ) + list(added), + list(unchanged), + list(deleted), + ) def indexed(self, index): return list(self.added_items)[index] @@ -365,4 +368,3 @@ def add_removed(self, value): self.added_items.remove(value) else: self.deleted_items.add(value) - diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py index e1dd960688..6b5da12d99 100644 --- a/lib/sqlalchemy/orm/evaluator.py +++ b/lib/sqlalchemy/orm/evaluator.py @@ -1,5 +1,6 @@ # orm/evaluator.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -13,18 +14,21 @@ class UnevaluatableError(Exception): _straight_ops = set(getattr(operators, 
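The ``DynamicAttributeImpl`` rework above is easier to follow against a concrete ``lazy='dynamic'`` relationship; a hedged sketch, assuming a declarative ``Base`` and an open ``session``::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.orm import relationship

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(ForeignKey('user.id'))

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        # a "dynamic" collection hands back an AppenderQuery rather than
        # loading a list, so it can be filtered before any SQL is emitted
        addresses = relationship(Address, lazy='dynamic')

    some_user = session.query(User).first()
    recent = some_user.addresses.filter(Address.id > 10).all()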
op) for op in ('add', 'mul', 'sub', - 'div', - 'mod', 'truediv', + 'div', + 'mod', 'truediv', 'lt', 'le', 'ne', 'gt', 'ge', 'eq')) _notimplemented_ops = set(getattr(operators, op) - for op in ('like_op', 'notlike_op', 'ilike_op', - 'notilike_op', 'between_op', 'in_op', - 'notin_op', 'endswith_op', 'concat_op')) + for op in ('like_op', 'notlike_op', 'ilike_op', + 'notilike_op', 'between_op', 'in_op', + 'notin_op', 'endswith_op', 'concat_op')) class EvaluatorCompiler(object): + def __init__(self, target_cls=None): + self.target_cls = target_cls + def process(self, clause): meth = getattr(self, "visit_%s" % clause.__visit_name__, None) if not meth: @@ -46,10 +50,17 @@ def visit_true(self, clause): def visit_column(self, clause): if 'parentmapper' in clause._annotations: - key = clause._annotations['parentmapper'].\ - _columntoproperty[clause].key + parentmapper = clause._annotations['parentmapper'] + if self.target_cls and not issubclass( + self.target_cls, parentmapper.class_): + raise UnevaluatableError( + "Can't evaluate criteria against alternate class %s" % + parentmapper.class_ + ) + key = parentmapper._columntoproperty[clause].key else: key = clause.key + get_corresponding_attr = operator.attrgetter(key) return lambda obj: get_corresponding_attr(obj) @@ -84,7 +95,7 @@ def evaluate(obj): def visit_binary(self, clause): eval_left, eval_right = list(map(self.process, - [clause.left, clause.right])) + [clause.left, clause.right])) operator = clause.operator if operator is operators.is_: def evaluate(obj): @@ -101,8 +112,8 @@ def evaluate(obj): return operator(eval_left(obj), eval_right(obj)) else: raise UnevaluatableError( - "Cannot evaluate %s with operator %s" % - (type(clause).__name__, clause.operator)) + "Cannot evaluate %s with operator %s" % + (type(clause).__name__, clause.operator)) return evaluate def visit_unary(self, clause): @@ -115,9 +126,12 @@ def evaluate(obj): return not value return evaluate raise UnevaluatableError( - "Cannot evaluate %s with operator %s" % - (type(clause).__name__, clause.operator)) + "Cannot evaluate %s with operator %s" % + (type(clause).__name__, clause.operator)) def visit_bindparam(self, clause): - val = clause.value + if clause.callable: + val = clause.callable() + else: + val = clause.value return lambda obj: val diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index 078f4d12f1..67ce46e5a9 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -1,5 +1,6 @@ # orm/events.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -16,6 +17,8 @@ from .session import Session, sessionmaker from .scoping import scoped_session from .attributes import QueryableAttribute +from .query import Query +from sqlalchemy.util.compat import inspect_getargspec class InstrumentationEvents(event.Events): """Events related to class instrumentation events. 
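The ``EvaluatorCompiler`` changes above (the new ``target_cls`` guard and the ``bindparam.callable`` support) are exercised by bulk updates that synchronize in-memory state; a sketch, assuming a mapped ``User`` class and an open ``session``::

    # 'evaluate' asks the ORM to apply the criteria and the new values to
    # matching objects already present in the Session via the evaluator,
    # instead of expiring or re-selecting them
    session.query(User).\
        filter(User.name == 'ed').\
        update({'name': 'edward'}, synchronize_session='evaluate')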
@@ -49,7 +52,6 @@ class InstrumentationEvents(event.Events): _target_class_doc = "SomeBaseClass" _dispatch_target = instrumentation.InstrumentationFactory - @classmethod def _accept_with(cls, target): if isinstance(target, type): @@ -60,7 +62,8 @@ def _accept_with(cls, target): @classmethod def _listen(cls, event_key, propagate=True, **kw): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn def listen(target_cls, *arg): listen_cls = target() @@ -70,10 +73,11 @@ def listen(target_cls, *arg): return fn(target_cls, *arg) def remove(ref): - key = event.registry._EventKey(None, identifier, listen, - instrumentation._instrumentation_factory) + key = event.registry._EventKey( + None, identifier, listen, + instrumentation._instrumentation_factory) getattr(instrumentation._instrumentation_factory.dispatch, - identifier).remove(key) + identifier).remove(key) target = weakref.ref(target.class_, remove) @@ -106,17 +110,18 @@ def attribute_instrument(self, cls, key, inst): """Called when an attribute is instrumented.""" - class _InstrumentationEventsHold(object): """temporary marker object used to transfer from _accept_with() to _listen() on the InstrumentationEvents class. """ + def __init__(self, class_): self.class_ = class_ dispatch = event.dispatcher(InstrumentationEvents) + class InstanceEvents(event.Events): """Define events specific to object lifecycle. @@ -189,7 +194,8 @@ def _accept_with(cls, orm, target): @classmethod def _listen(cls, event_key, raw=False, propagate=False, **kw): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn if not raw: def wrap(state, *arg, **kw): @@ -200,7 +206,8 @@ def wrap(state, *arg, **kw): if propagate: for mgr in target.subclass_managers(True): - event_key.with_dispatch_target(mgr).base_listen(propagate=True) + event_key.with_dispatch_target(mgr).base_listen( + propagate=True) @classmethod def _clear(cls): @@ -210,24 +217,74 @@ def _clear(cls): def first_init(self, manager, cls): """Called when the first instance of a particular mapping is called. + This event is called when the ``__init__`` method of a class + is called the first time for that particular class. The event + invokes before ``__init__`` actually proceeds as well as before + the :meth:`.InstanceEvents.init` event is invoked. + """ def init(self, target, args, kwargs): - """Receive an instance when it's constructor is called. + """Receive an instance when its constructor is called. This method is only called during a userland construction of - an object. It is not called when an object is loaded from the - database. + an object, in conjunction with the object's constructor, e.g. + its ``__init__`` method. It is not called when an object is + loaded from the database; see the :meth:`.InstanceEvents.load` + event in order to intercept a database load. + + The event is called before the actual ``__init__`` constructor + of the object is called. The ``kwargs`` dictionary may be + modified in-place in order to affect what is passed to + ``__init__``. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param args: positional arguments passed to the ``__init__`` method. + This is passed as a tuple and is currently immutable. 
+ :param kwargs: keyword arguments passed to the ``__init__`` method. + This structure *can* be altered in place. + + .. seealso:: + + :meth:`.InstanceEvents.init_failure` + + :meth:`.InstanceEvents.load` """ def init_failure(self, target, args, kwargs): - """Receive an instance when it's constructor has been called, + """Receive an instance when its constructor has been called, and raised an exception. This method is only called during a userland construction of - an object. It is not called when an object is loaded from the - database. + an object, in conjunction with the object's constructor, e.g. + its ``__init__`` method. It is not called when an object is loaded + from the database. + + The event is invoked after an exception raised by the ``__init__`` + method is caught. After the event + is invoked, the original exception is re-raised outwards, so that + the construction of the object still raises an exception. The + actual exception and stack trace raised should be present in + ``sys.exc_info()``. + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param args: positional arguments that were passed to the ``__init__`` + method. + :param kwargs: keyword arguments that were passed to the ``__init__`` + method. + + .. seealso:: + + :meth:`.InstanceEvents.init` + + :meth:`.InstanceEvents.load` """ @@ -254,50 +311,74 @@ def load(self, target, context): ``None`` if the load does not correspond to a :class:`.Query`, such as during :meth:`.Session.merge`. + .. seealso:: + + :meth:`.InstanceEvents.init` + + :meth:`.InstanceEvents.refresh` + """ def refresh(self, target, context, attrs): """Receive an object instance after one or more attributes have been refreshed from a query. + Contrast this to the :meth:`.InstanceEvents.load` method, which + is invoked when the object is first loaded from a query. + :param target: the mapped instance. If the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. :param context: the :class:`.QueryContext` corresponding to the current :class:`.Query` in progress. - :param attrs: iterable collection of attribute names which + :param attrs: sequence of attribute names which were populated, or None if all column-mapped, non-deferred attributes were populated. + .. seealso:: + + :meth:`.InstanceEvents.load` + """ - def expire(self, target, attrs): - """Receive an object instance after its attributes or some subset - have been expired. + def refresh_flush(self, target, flush_context, attrs): + """Receive an object instance after one or more attributes have + been refreshed within the persistence of the object. - 'keys' is a list of attribute names. If None, the entire - state was expired. + This event is the same as :meth:`.InstanceEvents.refresh` except + it is invoked within the unit of work flush process, and the values + here typically come from the process of handling an INSERT or + UPDATE, such as via the RETURNING clause or from Python-side default + values. + + .. versionadded:: 1.0.5 :param target: the mapped instance. If the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. - :param attrs: iterable collection of attribute - names which were expired, or None if all attributes were - expired. 
+ :param flush_context: Internal :class:`.UOWTransaction` object + which handles the details of the flush. + :param attrs: sequence of attribute names which + were populated. """ - def resurrect(self, target): - """Receive an object instance as it is 'resurrected' from - garbage collection, which occurs when a "dirty" state falls - out of scope. + def expire(self, target, attrs): + """Receive an object instance after its attributes or some subset + have been expired. + + 'keys' is a list of attribute names. If None, the entire + state was expired. :param target: the mapped instance. If the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. + :param attrs: sequence of attribute + names which were expired, or None if all attributes were + expired. """ @@ -316,7 +397,7 @@ def pickle(self, target, state_dict): """ def unpickle(self, target, state_dict): - """Receive an object instance after it's associated state has + """Receive an object instance after its associated state has been unpickled. :param target: the mapped instance. If @@ -329,6 +410,7 @@ def unpickle(self, target, state_dict): """ + class _EventsHold(event.RefCollection): """Hold onto listeners against unmapped, uninstrumented classes. @@ -336,6 +418,7 @@ class _EventsHold(event.RefCollection): those objects are created for that class. """ + def __init__(self, class_): self.class_ = class_ @@ -386,9 +469,9 @@ def populate(cls, class_, subject): collection = cls.all_holds[subclass] for event_key, raw, propagate in collection.values(): if propagate or subclass is class_: - # since we can't be sure in what order different classes - # in a hierarchy are triggered with populate(), - # we rely upon _EventsHold for all event + # since we can't be sure in what order different + # classes in a hierarchy are triggered with + # populate(), we rely upon _EventsHold for all event # assignment, instead of using the generic propagate # flag. event_key.with_dispatch_target(subject).\ @@ -472,9 +555,7 @@ def my_before_insert_listener(mapper, connection, target): processing normally. * ``sqlalchemy.orm.interfaces.EXT_STOP`` - cancel all subsequent event handlers in the chain. - * other values - the return value specified by specific listeners, - such as :meth:`~.MapperEvents.translate_row` or - :meth:`~.MapperEvents.create_instance`. + * other values - the return value specified by specific listeners. 
""" @@ -503,23 +584,25 @@ def _accept_with(cls, orm, target): return target @classmethod - def _listen(cls, event_key, raw=False, retval=False, propagate=False, **kw): + def _listen( + cls, event_key, raw=False, retval=False, propagate=False, **kw): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn if identifier in ("before_configured", "after_configured") and \ - target is not mapperlib.Mapper: + target is not mapperlib.Mapper: util.warn( - "'before_configured' and 'after_configured' ORM events " - "only invoke with the mapper() function or Mapper class " - "as the target.") + "'before_configured' and 'after_configured' ORM events " + "only invoke with the mapper() function or Mapper class " + "as the target.") if not raw or not retval: if not raw: meth = getattr(cls, identifier) try: target_index = \ - inspect.getargspec(meth)[0].index('target') - 1 + inspect_getargspec(meth)[0].index('target') - 1 except ValueError: target_index = None @@ -537,7 +620,7 @@ def wrap(*arg, **kw): if propagate: for mapper in target.self_and_descendants: event_key.with_dispatch_target(mapper).base_listen( - propagate=True, **kw) + propagate=True, **kw) else: event_key.base_listen(**kw) @@ -570,32 +653,67 @@ def on_new_class(mapper, cls_): """ def mapper_configured(self, mapper, class_): - """Called when the mapper for the class is fully configured. - - This event is the latest phase of mapper construction, and - is invoked when the mapped classes are first used, so that - relationships between mappers can be resolved. When the event is - called, the mapper should be in its final state. - - While the configuration event normally occurs automatically, - it can be forced to occur ahead of time, in the case where the event - is needed before any actual mapper usage, by using the - :func:`.configure_mappers` function. + """Called when a specific mapper has completed its own configuration + within the scope of the :func:`.configure_mappers` call. + + The :meth:`.MapperEvents.mapper_configured` event is invoked + for each mapper that is encountered when the + :func:`.orm.configure_mappers` function proceeds through the current + list of not-yet-configured mappers. + :func:`.orm.configure_mappers` is typically invoked + automatically as mappings are first used, as well as each time + new mappers have been made available and new mapper use is + detected. + + When the event is called, the mapper should be in its final + state, but **not including backrefs** that may be invoked from + other mappers; they might still be pending within the + configuration operation. Bidirectional relationships that + are instead configured via the + :paramref:`.orm.relationship.back_populates` argument + *will* be fully available, since this style of relationship does not + rely upon other possibly-not-configured mappers to know that they + exist. + For an event that is guaranteed to have **all** mappers ready + to go including backrefs that are defined only on other + mappings, use the :meth:`.MapperEvents.after_configured` + event; this event invokes only after all known mappings have been + fully configured. + + The :meth:`.MapperEvents.mapper_configured` event, unlike + :meth:`.MapperEvents.before_configured` or + :meth:`.MapperEvents.after_configured`, + is called for each mapper/class individually, and the mapper is + passed to the event itself. It also is called exactly once for + a particular mapper. 
The event is therefore useful for + configurational steps that benefit from being invoked just once + on a specific mapper basis, which don't require that "backref" + configurations are necessarily ready yet. :param mapper: the :class:`.Mapper` which is the target of this event. :param class\_: the mapped class. + .. seealso:: + + :meth:`.MapperEvents.before_configured` + + :meth:`.MapperEvents.after_configured` + """ # TODO: need coverage for this event def before_configured(self): """Called before a series of mappers have been configured. - This corresponds to the :func:`.orm.configure_mappers` call, which - note is usually called automatically as mappings are first - used. + The :meth:`.MapperEvents.before_configured` event is invoked + each time the :func:`.orm.configure_mappers` function is + invoked, before the function has done any of its work. + :func:`.orm.configure_mappers` is typically invoked + automatically as mappings are first used, as well as each time + new mappers have been made available and new mapper use is + detected. This event can **only** be applied to the :class:`.Mapper` class or :func:`.mapper` function, and not to individual mappings or @@ -607,11 +725,16 @@ def before_configured(self): def go(): # ... + Constrast this event to :meth:`.MapperEvents.after_configured`, + which is invoked after the series of mappers has been configured, + as well as :meth:`.MapperEvents.mapper_configured`, which is invoked + on a per-mapper basis as each one is configured to the extent possible. + Theoretically this event is called once per application, but is actually called any time new mappers are to be affected by a :func:`.orm.configure_mappers` call. If new mappings are constructed after existing ones have - already been used, this event can be called again. To ensure + already been used, this event will likely be called again. To ensure that a particular event is only called once and no further, the ``once=True`` argument (new in 0.9.4) can be applied:: @@ -624,14 +747,33 @@ def go(): .. versionadded:: 0.9.3 + + .. seealso:: + + :meth:`.MapperEvents.mapper_configured` + + :meth:`.MapperEvents.after_configured` + """ def after_configured(self): """Called after a series of mappers have been configured. - This corresponds to the :func:`.orm.configure_mappers` call, which - note is usually called automatically as mappings are first - used. + The :meth:`.MapperEvents.after_configured` event is invoked + each time the :func:`.orm.configure_mappers` function is + invoked, after the function has completed its work. + :func:`.orm.configure_mappers` is typically invoked + automatically as mappings are first used, as well as each time + new mappers have been made available and new mapper use is + detected. + + Contrast this event to the :meth:`.MapperEvents.mapper_configured` + event, which is called on a per-mapper basis while the configuration + operation proceeds; unlike that event, when this event is invoked, + all cross-configurations (e.g. backrefs) will also have been made + available for any mappers that were pending. + Also constrast to :meth:`.MapperEvents.before_configured`, + which is invoked before the series of mappers has been configured. This event can **only** be applied to the :class:`.Mapper` class or :func:`.mapper` function, and not to individual mappings or @@ -647,7 +789,7 @@ def go(): application, but is actually called any time new mappers have been affected by a :func:`.orm.configure_mappers` call. 
If new mappings are constructed after existing ones have - already been used, this event can be called again. To ensure + already been used, this event will likely be called again. To ensure that a particular event is only called once and no further, the ``once=True`` argument (new in 0.9.4) can be applied:: @@ -657,123 +799,11 @@ def go(): def go(): # ... - """ - - def translate_row(self, mapper, context, row): - """Perform pre-processing on the given result row and return a - new row instance. - - This listener is typically registered with ``retval=True``. - It is called when the mapper first receives a row, before - the object identity or the instance itself has been derived - from that row. The given row may or may not be a - :class:`.RowProxy` object - it will always be a dictionary-like - object which contains mapped columns as keys. The - returned object should also be a dictionary-like object - which recognizes mapped columns as keys. - - :param mapper: the :class:`.Mapper` which is the target - of this event. - :param context: the :class:`.QueryContext`, which includes - a handle to the current :class:`.Query` in progress as well - as additional state information. - :param row: the result row being handled. This may be - an actual :class:`.RowProxy` or may be a dictionary containing - :class:`.Column` objects as keys. - :return: When configured with ``retval=True``, the function - should return a dictionary-like row object, or ``EXT_CONTINUE``, - indicating the original row should be used. - - - """ - - def create_instance(self, mapper, context, row, class_): - """Receive a row when a new object instance is about to be - created from that row. - - The method can choose to create the instance itself, or it can return - EXT_CONTINUE to indicate normal object creation should take place. - This listener is typically registered with ``retval=True``. - - :param mapper: the :class:`.Mapper` which is the target - of this event. - :param context: the :class:`.QueryContext`, which includes - a handle to the current :class:`.Query` in progress as well - as additional state information. - :param row: the result row being handled. This may be - an actual :class:`.RowProxy` or may be a dictionary containing - :class:`.Column` objects as keys. - :param class\_: the mapped class. - :return: When configured with ``retval=True``, the return value - should be a newly created instance of the mapped class, - or ``EXT_CONTINUE`` indicating that default object construction - should take place. - - """ - - def append_result(self, mapper, context, row, target, - result, **flags): - """Receive an object instance before that instance is appended - to a result list. - - This is a rarely used hook which can be used to alter - the construction of a result list returned by :class:`.Query`. - - :param mapper: the :class:`.Mapper` which is the target - of this event. - :param context: the :class:`.QueryContext`, which includes - a handle to the current :class:`.Query` in progress as well - as additional state information. - :param row: the result row being handled. This may be - an actual :class:`.RowProxy` or may be a dictionary containing - :class:`.Column` objects as keys. - :param target: the mapped instance being populated. If - the event is configured with ``raw=True``, this will - instead be the :class:`.InstanceState` state-management - object associated with the instance. - :param result: a list-like object where results are being - appended. 
- :param \**flags: Additional state information about the - current handling of the row. - :return: If this method is registered with ``retval=True``, - a return value of ``EXT_STOP`` will prevent the instance - from being appended to the given result list, whereas a - return value of ``EXT_CONTINUE`` will result in the default - behavior of appending the value to the result list. - - """ - - def populate_instance(self, mapper, context, row, - target, **flags): - """Receive an instance before that instance has - its attributes populated. + .. seealso:: - This usually corresponds to a newly loaded instance but may - also correspond to an already-loaded instance which has - unloaded attributes to be populated. The method may be called - many times for a single instance, as multiple result rows are - used to populate eagerly loaded collections. + :meth:`.MapperEvents.mapper_configured` - Most usages of this hook are obsolete. For a - generic "object has been newly created from a row" hook, use - :meth:`.InstanceEvents.load`. - - :param mapper: the :class:`.Mapper` which is the target - of this event. - :param context: the :class:`.QueryContext`, which includes - a handle to the current :class:`.Query` in progress as well - as additional state information. - :param row: the result row being handled. This may be - an actual :class:`.RowProxy` or may be a dictionary containing - :class:`.Column` objects as keys. - :param target: the mapped instance. If - the event is configured with ``raw=True``, this will - instead be the :class:`.InstanceState` state-management - object associated with the instance. - :return: When configured with ``retval=True``, a return - value of ``EXT_STOP`` will bypass instance population by - the mapper. A value of ``EXT_CONTINUE`` indicates that - default instance population should take place. + :meth:`.MapperEvents.before_configured` """ @@ -796,30 +826,14 @@ def before_insert(self, mapper, connection, target): steps. .. warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** - :class:`.Connection` **only.** Handlers here should **not** make - alterations to the state of the :class:`.Session` overall, and - in general should not affect any :func:`.relationship` -mapped - attributes, as session cascade rules will not function properly, - nor is it always known if the related class has already been - handled. Operations that **are not supported in mapper - events** include: - - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. ``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, or - another method designed to establish some particular state. - * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. 
:param mapper: the :class:`.Mapper` which is the target of this event. @@ -833,6 +847,10 @@ def before_insert(self, mapper, connection, target): object associated with the instance. :return: No return value is supported by this event. + .. seealso:: + + :ref:`session_persistence_events` + """ def after_insert(self, mapper, connection, target): @@ -854,30 +872,14 @@ def after_insert(self, mapper, connection, target): event->persist->event steps. .. warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** - :class:`.Connection` **only.** Handlers here should **not** make - alterations to the state of the :class:`.Session` overall, and in - general should not affect any :func:`.relationship` -mapped - attributes, as session cascade rules will not function properly, - nor is it always known if the related class has already been - handled. Operations that **are not supported in mapper - events** include: - - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. ``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, - or another method designed to establish some particular state. - * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. :param mapper: the :class:`.Mapper` which is the target of this event. @@ -891,6 +893,10 @@ def after_insert(self, mapper, connection, target): object associated with the instance. :return: No return value is supported by this event. + .. seealso:: + + :ref:`session_persistence_events` + """ def before_update(self, mapper, connection, target): @@ -931,29 +937,14 @@ def before_update(self, mapper, connection, target): steps. .. warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` - **only.** Handlers here should **not** make alterations to the - state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as - session cascade rules will not function properly, nor is it - always known if the related class has already been handled. - Operations that **are not supported in mapper events** include: - - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. ``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, - or another method designed to establish some particular state. 
- * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. :param mapper: the :class:`.Mapper` which is the target of this event. @@ -966,6 +957,11 @@ def before_update(self, mapper, connection, target): instead be the :class:`.InstanceState` state-management object associated with the instance. :return: No return value is supported by this event. + + .. seealso:: + + :ref:`session_persistence_events` + """ def after_update(self, mapper, connection, target): @@ -1005,29 +1001,14 @@ def after_update(self, mapper, connection, target): steps. .. warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` - **only.** Handlers here should **not** make alterations to the - state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as - session cascade rules will not function properly, nor is it - always known if the related class has already been handled. - Operations that **are not supported in mapper events** include: - - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. ``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, - or another method designed to establish some particular state. - * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. :param mapper: the :class:`.Mapper` which is the target of this event. @@ -1041,6 +1022,10 @@ def after_update(self, mapper, connection, target): object associated with the instance. :return: No return value is supported by this event. + .. seealso:: + + :ref:`session_persistence_events` + """ def before_delete(self, mapper, connection, target): @@ -1056,29 +1041,14 @@ def before_delete(self, mapper, connection, target): once in a later step. .. 
warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` - **only.** Handlers here should **not** make alterations to the - state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as - session cascade rules will not function properly, nor is it - always known if the related class has already been handled. - Operations that **are not supported in mapper events** include: - - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. ``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, - or another method designed to establish some particular state. - * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. **Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. :param mapper: the :class:`.Mapper` which is the target of this event. @@ -1092,6 +1062,10 @@ def before_delete(self, mapper, connection, target): object associated with the instance. :return: No return value is supported by this event. + .. seealso:: + + :ref:`session_persistence_events` + """ def after_delete(self, mapper, connection, target): @@ -1107,29 +1081,14 @@ def after_delete(self, mapper, connection, target): once in a previous step. .. warning:: - Mapper-level flush events are designed to operate **on attributes - local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` - **only.** Handlers here should **not** make alterations to the - state of the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as - session cascade rules will not function properly, nor is it - always known if the related class has already been handled. - Operations that **are not supported in mapper events** include: - - * :meth:`.Session.add` - * :meth:`.Session.delete` - * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, - i.e. ``someobject.related = someotherobject`` - - Operations which manipulate the state of the object - relative to other objects are better handled: - - * In the ``__init__()`` method of the mapped object itself, - or another method designed to establish some particular state. - * In a ``@validates`` handler, see :ref:`simple_validators` - * Within the :meth:`.SessionEvents.before_flush` event. + + Mapper-level flush events only allow **very limited operations**, + on attributes local to the row being operated upon only, + as well as allowing any SQL to be emitted on the given + :class:`.Connection`. 
**Please read fully** the notes + at :ref:`session_persistence_mapper` for guidelines on using + these methods; generally, the :meth:`.SessionEvents.before_flush` + method should be preferred for general on-flush changes. :param mapper: the :class:`.Mapper` which is the target of this event. @@ -1143,8 +1102,13 @@ def after_delete(self, mapper, connection, target): object associated with the instance. :return: No return value is supported by this event. + .. seealso:: + + :ref:`session_persistence_events` + """ + class _MapperEventsHold(_EventsHold): all_holds = weakref.WeakKeyDictionary() @@ -1195,11 +1159,11 @@ def _accept_with(cls, target): ( not isinstance(target, type) or not issubclass(target, Session) - ): + ): raise exc.ArgumentError( - "Session event listen on a scoped_session " - "requires that its creation callable " - "is associated with the Session class.") + "Session event listen on a scoped_session " + "requires that its creation callable " + "is associated with the Session class.") if isinstance(target, sessionmaker): return target.class_ @@ -1264,8 +1228,10 @@ def before_commit(self, session): The :meth:`~.SessionEvents.before_commit` hook is *not* per-flush, that is, the :class:`.Session` can emit SQL to the database many times within the scope of a transaction. - For interception of these events, use the :meth:`~.SessionEvents.before_flush`, - :meth:`~.SessionEvents.after_flush`, or :meth:`~.SessionEvents.after_flush_postexec` + For interception of these events, use the + :meth:`~.SessionEvents.before_flush`, + :meth:`~.SessionEvents.after_flush`, or + :meth:`~.SessionEvents.after_flush_postexec` events. :param session: The target :class:`.Session`. @@ -1290,16 +1256,19 @@ def after_commit(self, session): The :meth:`~.SessionEvents.after_commit` hook is *not* per-flush, that is, the :class:`.Session` can emit SQL to the database many times within the scope of a transaction. - For interception of these events, use the :meth:`~.SessionEvents.before_flush`, - :meth:`~.SessionEvents.after_flush`, or :meth:`~.SessionEvents.after_flush_postexec` + For interception of these events, use the + :meth:`~.SessionEvents.before_flush`, + :meth:`~.SessionEvents.after_flush`, or + :meth:`~.SessionEvents.after_flush_postexec` events. .. note:: - The :class:`.Session` is not in an active tranasction - when the :meth:`~.SessionEvents.after_commit` event is invoked, and therefore - can not emit SQL. To emit SQL corresponding to every transaction, - use the :meth:`~.SessionEvents.before_commit` event. + The :class:`.Session` is not in an active transaction + when the :meth:`~.SessionEvents.after_commit` event is invoked, + and therefore can not emit SQL. To emit SQL corresponding to + every transaction, use the :meth:`~.SessionEvents.before_commit` + event. :param session: The target :class:`.Session`. 
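The warnings above repeatedly defer to :meth:`.SessionEvents.before_flush` for general flush-time changes; a minimal sketch of such a listener, in which the ``audit()`` helper is hypothetical::

    from sqlalchemy import event
    from sqlalchemy.orm import Session

    @event.listens_for(Session, 'before_flush')
    def receive_before_flush(session, flush_context, instances):
        # unlike the mapper-level flush events, Session.add()/delete(),
        # collection changes and relationship assignments are allowed here
        for obj in list(session.new):
            audit(obj)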
@@ -1377,6 +1346,8 @@ def before_flush(self, session, flush_context, instances): :meth:`~.SessionEvents.after_flush_postexec` + :ref:`session_persistence_events` + """ def after_flush(self, session, flush_context): @@ -1397,6 +1368,8 @@ def after_flush(self, session, flush_context): :meth:`~.SessionEvents.after_flush_postexec` + :ref:`session_persistence_events` + """ def after_flush_postexec(self, session, flush_context): @@ -1419,6 +1392,8 @@ def after_flush_postexec(self, session, flush_context): :meth:`~.SessionEvents.after_flush` + :ref:`session_persistence_events` + """ def after_begin(self, session, transaction, connection): @@ -1447,8 +1422,8 @@ def before_attach(self, session, instance): This is called before an add, delete or merge causes the object to be part of the session. - .. versionadded:: 0.8. Note that :meth:`~.SessionEvents.after_attach` now - fires off after the item is part of the session. + .. versionadded:: 0.8. Note that :meth:`~.SessionEvents.after_attach` + now fires off after the item is part of the session. :meth:`.before_attach` is provided for those cases where the item should not yet be part of the session state. @@ -1456,6 +1431,8 @@ def before_attach(self, session, instance): :meth:`~.SessionEvents.after_attach` + :ref:`session_lifecycle_events` + """ def after_attach(self, session, instance): @@ -1478,15 +1455,17 @@ def after_attach(self, session, instance): :meth:`~.SessionEvents.before_attach` + :ref:`session_lifecycle_events` + """ @event._legacy_signature("0.9", - ["session", "query", "query_context", "result"], - lambda update_context: ( - update_context.session, - update_context.query, - update_context.context, - update_context.result)) + ["session", "query", "query_context", "result"], + lambda update_context: ( + update_context.session, + update_context.query, + update_context.context, + update_context.result)) def after_bulk_update(self, update_context): """Execute after a bulk update operation to the session. @@ -1496,8 +1475,8 @@ def after_bulk_update(self, update_context): details about the update, including these attributes: * ``session`` - the :class:`.Session` involved - * ``query`` -the :class:`.Query` object that this update operation was - called upon. + * ``query`` -the :class:`.Query` object that this update operation + was called upon. * ``context`` The :class:`.QueryContext` object, corresponding to the invocation of an ORM query. * ``result`` the :class:`.ResultProxy` returned as a result of the @@ -1507,12 +1486,12 @@ def after_bulk_update(self, update_context): """ @event._legacy_signature("0.9", - ["session", "query", "query_context", "result"], - lambda delete_context: ( - delete_context.session, - delete_context.query, - delete_context.context, - delete_context.result)) + ["session", "query", "query_context", "result"], + lambda delete_context: ( + delete_context.session, + delete_context.query, + delete_context.context, + delete_context.result)) def after_bulk_delete(self, delete_context): """Execute after a bulk delete operation to the session. @@ -1522,8 +1501,8 @@ def after_bulk_delete(self, delete_context): details about the update, including these attributes: * ``session`` - the :class:`.Session` involved - * ``query`` -the :class:`.Query` object that this update operation was - called upon. + * ``query`` -the :class:`.Query` object that this update operation + was called upon. * ``context`` The :class:`.QueryContext` object, corresponding to the invocation of an ORM query. 
* ``result`` the :class:`.ResultProxy` returned as a result of the @@ -1595,8 +1574,9 @@ def validate_phone(target, value, oldvalue, initiator): @staticmethod def _set_dispatch(cls, dispatch_cls): - event.Events._set_dispatch(cls, dispatch_cls) + dispatch = event.Events._set_dispatch(cls, dispatch_cls) dispatch_cls._active_history = False + return dispatch @classmethod def _accept_with(cls, target): @@ -1608,11 +1588,12 @@ def _accept_with(cls, target): @classmethod def _listen(cls, event_key, active_history=False, - raw=False, retval=False, - propagate=False): + raw=False, retval=False, + propagate=False): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn if active_history: target.dispatch._active_history = True @@ -1634,7 +1615,8 @@ def wrap(target, value, *arg): manager = instrumentation.manager_of_class(target.class_) for mgr in manager.subclass_managers(True): - event_key.with_dispatch_target(mgr[target.key]).base_listen(propagate=True) + event_key.with_dispatch_target( + mgr[target.key]).base_listen(propagate=True) def append(self, target, value, initiator): """Receive a collection append event. @@ -1648,12 +1630,13 @@ def append(self, target, value, initiator): replaces it. :param initiator: An instance of :class:`.attributes.Event` representing the initiation of the event. May be modified - from it's original value by backref handlers in order to control + from its original value by backref handlers in order to control chained event propagation. .. versionchanged:: 0.9.0 the ``initiator`` argument is now - passed as a :class:`.attributes.Event` object, and may be modified - by backref handlers within a chain of backref-linked events. + passed as a :class:`.attributes.Event` object, and may be + modified by backref handlers within a chain of backref-linked + events. :return: if the event was registered with ``retval=True``, the given value, or a new effective value, should be returned. @@ -1669,12 +1652,13 @@ def remove(self, target, value, initiator): :param value: the value being removed. :param initiator: An instance of :class:`.attributes.Event` representing the initiation of the event. May be modified - from it's original value by backref handlers in order to control + from its original value by backref handlers in order to control chained event propagation. .. versionchanged:: 0.9.0 the ``initiator`` argument is now - passed as a :class:`.attributes.Event` object, and may be modified - by backref handlers within a chain of backref-linked events. + passed as a :class:`.attributes.Event` object, and may be + modified by backref handlers within a chain of backref-linked + events. :return: No return value is defined for this event. """ @@ -1697,15 +1681,121 @@ def set(self, target, value, oldvalue, initiator): or expired. :param initiator: An instance of :class:`.attributes.Event` representing the initiation of the event. May be modified - from it's original value by backref handlers in order to control + from its original value by backref handlers in order to control chained event propagation. .. versionchanged:: 0.9.0 the ``initiator`` argument is now - passed as a :class:`.attributes.Event` object, and may be modified - by backref handlers within a chain of backref-linked events. + passed as a :class:`.attributes.Event` object, and may be + modified by backref handlers within a chain of backref-linked + events. 
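A brief sketch of the kind of attribute listener these docstrings describe, registered with ``retval=True`` so that the returned value is the one used; the ``User.addresses`` relationship is an assumption::

    from sqlalchemy import event

    @event.listens_for(User.addresses, 'append', retval=True)
    def on_append(target, value, initiator):
        # with retval=True, whatever is returned here is what gets appended
        return value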
:return: if the event was registered with ``retval=True``, the given value, or a new effective value, should be returned. """ + def init_collection(self, target, collection, collection_adapter): + """Receive a 'collection init' event. + + This event is triggered for a collection-based attribute, when + the initial "empty collection" is first generated for a blank + attribute, as well as for when the collection is replaced with + a new one, such as via a set event. + + E.g., given that ``User.addresses`` is a relationship-based + collection, the event is triggered here:: + + u1 = User() + u1.addresses.append(a1) # <- new collection + + and also during replace operations:: + + u1.addresses = [a2, a3] # <- new collection + + :param target: the object instance receiving the event. + If the listener is registered with ``raw=True``, this will + be the :class:`.InstanceState` object. + :param collection: the new collection. This will always be generated + from what was specified as + :paramref:`.RelationshipProperty.collection_class`, and will always + be empty. + :param collection_adpater: the :class:`.CollectionAdapter` that will + mediate internal access to the collection. + + .. versionadded:: 1.0.0 the :meth:`.AttributeEvents.init_collection` + and :meth:`.AttributeEvents.dispose_collection` events supersede + the :class:`.collection.linker` hook. + + """ + + def dispose_collection(self, target, collection, collection_adpater): + """Receive a 'collection dispose' event. + + This event is triggered for a collection-based attribute when + a collection is replaced, that is:: + + u1.addresses.append(a1) + + u1.addresses = [a2, a3] # <- old collection is disposed + + The mechanics of the event will typically include that the given + collection is empty, even if it stored objects while being replaced. + + .. versionadded:: 1.0.0 the :meth:`.AttributeEvents.init_collection` + and :meth:`.AttributeEvents.dispose_collection` events supersede + the :class:`.collection.linker` hook. + + """ + + +class QueryEvents(event.Events): + """Represent events within the construction of a :class:`.Query` object. + + The events here are intended to be used with an as-yet-unreleased + inspection system for :class:`.Query`. Some very basic operations + are possible now, however the inspection system is intended to allow + complex query manipulations to be automated. + + .. versionadded:: 1.0.0 + + """ + + _target_class_doc = "SomeQuery" + _dispatch_target = Query + + def before_compile(self, query): + """Receive the :class:`.Query` object before it is composed into a + core :class:`.Select` object. + + This event is intended to allow changes to the query given:: + + @event.listens_for(Query, "before_compile", retval=True) + def no_deleted(query): + for desc in query.column_descriptions: + if desc['type'] is User: + entity = desc['entity'] + query = query.filter(entity.deleted == False) + return query + + The event should normally be listened with the ``retval=True`` + parameter set, so that the modified query may be returned. 
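A small usage sketch for the new ``init_collection`` / ``dispose_collection`` hooks documented above, again assuming a ``User.addresses`` relationship-based collection::

    from sqlalchemy import event

    @event.listens_for(User.addresses, 'dispose_collection')
    def on_dispose(target, collection, collection_adapter):
        # fires when the collection is replaced wholesale, e.g.
        #     user.addresses = [a2, a3]
        # the old collection is typically already emptied at this point
        pass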
+ + + """ + + @classmethod + def _listen( + cls, event_key, retval=False, **kw): + fn = event_key._listen_fn + + if not retval: + def wrap(*arg, **kw): + if not retval: + query = arg[0] + fn(*arg, **kw) + return query + else: + return fn(*arg, **kw) + event_key = event_key.with_wrapper(wrap) + + event_key.base_listen(**kw) diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py index d1ef1ded98..db993220f5 100644 --- a/lib/sqlalchemy/orm/exc.py +++ b/lib/sqlalchemy/orm/exc.py @@ -1,5 +1,6 @@ # orm/exc.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -120,7 +121,7 @@ class ObjectDeletedError(sa_exc.InvalidRequestError): def __init__(self, base, state, msg=None): if not msg: msg = "Instance '%s' has been deleted, or its "\ - "row is otherwise not present." % base.state_str(state) + "row is otherwise not present." % base.state_str(state) sa_exc.InvalidRequestError.__init__(self, msg) @@ -149,6 +150,7 @@ def _safe_cls_name(cls): cls_name = repr(cls) return cls_name + @util.dependencies("sqlalchemy.orm.base") def _default_unmapped(base, cls): try: diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py index a91085d286..5646732e85 100644 --- a/lib/sqlalchemy/orm/identity.py +++ b/lib/sqlalchemy/orm/identity.py @@ -1,5 +1,6 @@ # orm/identity.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,17 +9,27 @@ from . import attributes from .. 
import util -class IdentityMap(dict): + +class IdentityMap(object): def __init__(self): + self._dict = {} self._modified = set() self._wr = weakref.ref(self) + def keys(self): + return self._dict.keys() + def replace(self, state): raise NotImplementedError() def add(self, state): raise NotImplementedError() + def _add_unpresent(self, state, key): + """optional inlined form of add() which can assume item isn't present + in the map""" + self.add(state) + def update(self, dict): raise NotImplementedError("IdentityMap uses add() to insert data") @@ -33,7 +44,8 @@ def _manage_incoming_state(self, state): def _manage_removed_state(self, state): del state._instance_dict - self._modified.discard(state) + if state.modified: + self._modified.discard(state) def _dirty_states(self): return self._modified @@ -57,6 +69,9 @@ def pop(self, key, *args): def setdefault(self, key, default=None): raise NotImplementedError("IdentityMap uses add() to insert data") + def __len__(self): + return len(self._dict) + def copy(self): raise NotImplementedError() @@ -68,11 +83,9 @@ def __delitem__(self, key): class WeakInstanceDict(IdentityMap): - def __init__(self): - IdentityMap.__init__(self) def __getitem__(self, key): - state = dict.__getitem__(self, key) + state = self._dict[key] o = state.obj() if o is None: raise KeyError(key) @@ -80,8 +93,8 @@ def __getitem__(self, key): def __contains__(self, key): try: - if dict.__contains__(self, key): - state = dict.__getitem__(self, key) + if key in self._dict: + state = self._dict[key] o = state.obj() else: return False @@ -91,25 +104,25 @@ def __contains__(self, key): return o is not None def contains_state(self, state): - return dict.get(self, state.key) is state + return state.key in self._dict and self._dict[state.key] is state def replace(self, state): - if dict.__contains__(self, state.key): - existing = dict.__getitem__(self, state.key) + if state.key in self._dict: + existing = self._dict[state.key] if existing is not state: self._manage_removed_state(existing) else: return - dict.__setitem__(self, state.key, state) + self._dict[state.key] = state self._manage_incoming_state(state) def add(self, state): key = state.key # inline of self.__contains__ - if dict.__contains__(self, key): + if key in self._dict: try: - existing_state = dict.__getitem__(self, key) + existing_state = self._dict[key] if existing_state is not state: o = existing_state.obj() if o is not None: @@ -121,19 +134,24 @@ def add(self, state): return except KeyError: pass - dict.__setitem__(self, key, state) + self._dict[key] = state self._manage_incoming_state(state) + def _add_unpresent(self, state, key): + # inlined form of add() called by loading.py + self._dict[key] = state + state._instance_dict = self._wr + def get(self, key, default=None): - state = dict.get(self, key, default) - if state is default: + if key not in self._dict: return default + state = self._dict[key] o = state.obj() if o is None: return default return o - def _items(self): + def items(self): values = self.all_states() result = [] for state in values: @@ -142,7 +160,7 @@ def _items(self): result.append((state.key, value)) return result - def _values(self): + def values(self): values = self.all_states() result = [] for state in values: @@ -152,39 +170,81 @@ def _values(self): return result + def __iter__(self): + return iter(self.keys()) + if util.py2k: - items = _items - values = _values def iteritems(self): return iter(self.items()) def itervalues(self): return iter(self.values()) - else: - def items(self): - return 
iter(self._items()) - - def values(self): - return iter(self._values()) def all_states(self): if util.py2k: - return dict.values(self) + return self._dict.values() else: - return list(dict.values(self)) + return list(self._dict.values()) + + def _fast_discard(self, state): + self._dict.pop(state.key, None) def discard(self, state): - st = dict.get(self, state.key, None) - if st is state: - dict.pop(self, state.key, None) + st = self._dict.pop(state.key, None) + if st: + assert st is state self._manage_removed_state(state) + def safe_discard(self, state): + if state.key in self._dict: + st = self._dict[state.key] + if st is state: + self._dict.pop(state.key, None) + self._manage_removed_state(state) + def prune(self): return 0 class StrongInstanceDict(IdentityMap): + """A 'strong-referencing' version of the identity map. + + .. deprecated:: this object is present in order to fulfill + the ``weak_identity_map=False`` option of the Session. + This option is present to allow compatibility with older applications, + but it is recommended that strong references to objects + be maintained by the calling application + externally to the :class:`.Session` itself, to the degree + that is needed by the application. + + """ + + if util.py2k: + def itervalues(self): + return self._dict.itervalues() + + def iteritems(self): + return self._dict.iteritems() + + def __iter__(self): + return iter(self.dict_) + + def __getitem__(self, key): + return self._dict[key] + + def __contains__(self, key): + return key in self._dict + + def get(self, key, default=None): + return self._dict.get(key, default) + + def values(self): + return self._dict.values() + + def items(self): + return self._dict.items() + def all_states(self): return [attributes.instance_state(o) for o in self.values()] @@ -194,34 +254,48 @@ def contains_state(self, state): attributes.instance_state(self[state.key]) is state) def replace(self, state): - if dict.__contains__(self, state.key): - existing = dict.__getitem__(self, state.key) + if state.key in self._dict: + existing = self._dict[state.key] existing = attributes.instance_state(existing) if existing is not state: self._manage_removed_state(existing) else: return - dict.__setitem__(self, state.key, state.obj()) + self._dict[state.key] = state.obj() self._manage_incoming_state(state) def add(self, state): if state.key in self: - if attributes.instance_state(dict.__getitem__(self, - state.key)) is not state: + if attributes.instance_state(self._dict[state.key]) is not state: raise AssertionError('A conflicting state is already ' - 'present in the identity map for key %r' - % (state.key, )) + 'present in the identity map for key %r' + % (state.key, )) else: - dict.__setitem__(self, state.key, state.obj()) + self._dict[state.key] = state.obj() self._manage_incoming_state(state) + def _add_unpresent(self, state, key): + # inlined form of add() called by loading.py + self._dict[key] = state.obj() + state._instance_dict = self._wr + + def _fast_discard(self, state): + self._dict.pop(state.key, None) + def discard(self, state): - obj = dict.get(self, state.key, None) + obj = self._dict.pop(state.key, None) if obj is not None: + self._manage_removed_state(state) + st = attributes.instance_state(obj) + assert st is state + + def safe_discard(self, state): + if state.key in self._dict: + obj = self._dict[state.key] st = attributes.instance_state(obj) if st is state: - dict.pop(self, state.key, None) + self._dict.pop(state.key, None) self._manage_removed_state(state) def prune(self): @@ -234,7 +308,7 
@@ def prune(self): keepers = weakref.WeakValueDictionary() keepers.update(self) - dict.clear(self) - dict.update(self, keepers) + self._dict.clear() + self._dict.update(keepers) self.modified = bool(dirty) return ref_count - len(self) diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py index 68b4f06115..d41ee59cb9 100644 --- a/lib/sqlalchemy/orm/instrumentation.py +++ b/lib/sqlalchemy/orm/instrumentation.py @@ -1,5 +1,6 @@ # orm/instrumentation.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -33,12 +34,18 @@ from .. import util from . import base + +_memoized_key_collection = util.group_expirable_memoized_property() + + class ClassManager(dict): """tracks state information at the class level.""" MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR STATE_ATTR = base.DEFAULT_STATE_ATTR + _state_setter = staticmethod(util.attrsetter(STATE_ATTR)) + deferred_scalar_loader = None original_init = object.__init__ @@ -53,16 +60,16 @@ def __init__(self, class_): self.originals = {} self._bases = [mgr for mgr in [ - manager_of_class(base) - for base in self.class_.__bases__ - if isinstance(base, type) - ] if mgr is not None] + manager_of_class(base) + for base in self.class_.__bases__ + if isinstance(base, type) + ] if mgr is not None] for base in self._bases: self.update(base) self.dispatch._events._new_classmanager_instance(class_, self) - #events._InstanceEventsHold.populate(class_, self) + # events._InstanceEventsHold.populate(class_, self) for basecls in class_.__mro__: mgr = manager_of_class(basecls) @@ -73,10 +80,10 @@ def __init__(self, class_): if '__del__' in class_.__dict__: util.warn("__del__() method on class %s will " - "cause unreachable cycles and memory leaks, " - "as SQLAlchemy instrumentation often creates " - "reference cycles. Please remove this method." % - class_) + "cause unreachable cycles and memory leaks, " + "as SQLAlchemy instrumentation often creates " + "reference cycles. Please remove this method." % + class_) def __hash__(self): return id(self) @@ -88,6 +95,21 @@ def __eq__(self, other): def is_mapped(self): return 'mapper' in self.__dict__ + @_memoized_key_collection + def _all_key_set(self): + return frozenset(self) + + @_memoized_key_collection + def _collection_impl_keys(self): + return frozenset([ + attr.key for attr in self.values() if attr.impl.collection]) + + @_memoized_key_collection + def _scalar_loader_impls(self): + return frozenset([ + attr.impl for attr in + self.values() if attr.impl.accepts_scalar_loader]) + @util.memoized_property def mapper(self): # raises unless self.mapper has been assigned @@ -95,10 +117,11 @@ def mapper(self): def _all_sqla_attributes(self, exclude=None): """return an iterator of all classbound attributes that are - implement :class:`._InspectionAttr`. + implement :class:`.InspectionAttr`. This includes :class:`.QueryableAttribute` as well as extension - types such as :class:`.hybrid_property` and :class:`.AssociationProxy`. + types such as :class:`.hybrid_property` and + :class:`.AssociationProxy`. 
""" if exclude is None: @@ -107,10 +130,9 @@ def _all_sqla_attributes(self, exclude=None): for key in set(supercls.__dict__).difference(exclude): exclude.add(key) val = supercls.__dict__[key] - if isinstance(val, interfaces._InspectionAttr): + if isinstance(val, interfaces.InspectionAttr): yield key, val - def _attr_has_impl(self, key): """Return True if the given attribute is fully initialized. @@ -184,7 +206,6 @@ def state_getter(self): def dict_getter(self): return _default_dict_getter - def instrument_attribute(self, key, inst, propagated=False): if propagated: if key in self.local_attrs: @@ -192,6 +213,7 @@ def instrument_attribute(self, key, inst, propagated=False): else: self.local_attrs[key] = inst self.install_descriptor(key, inst) + _memoized_key_collection.expire_instance(self) self[key] = inst for cls in self.class_.__subclasses__(): @@ -209,7 +231,7 @@ def subclass_managers(self, recursive): def post_configure_attribute(self, key): _instrumentation_factory.dispatch.\ - attribute_instrument(self.class_, key, self[key]) + attribute_instrument(self.class_, key, self[key]) def uninstrument_attribute(self, key, propagated=False): if key not in self: @@ -220,6 +242,7 @@ def uninstrument_attribute(self, key, propagated=False): else: del self.local_attrs[key] self.uninstall_descriptor(key) + _memoized_key_collection.expire_instance(self) del self[key] for cls in self.class_.__subclasses__(): manager = manager_of_class(cls) @@ -283,17 +306,19 @@ def get_impl(self, key): def attributes(self): return iter(self.values()) - ## InstanceState management + # InstanceState management def new_instance(self, state=None): instance = self.class_.__new__(self.class_) - setattr(instance, self.STATE_ATTR, - state or self._state_constructor(instance, self)) + if state is None: + state = self._state_constructor(instance, self) + self._state_setter(instance, state) return instance def setup_instance(self, instance, state=None): - setattr(instance, self.STATE_ATTR, - state or self._state_constructor(instance, self)) + if state is None: + state = self._state_constructor(instance, self) + self._state_setter(instance, state) def teardown_instance(self, instance): delattr(instance, self.STATE_ATTR) @@ -317,10 +342,10 @@ def _new_state_if_none(self, instance): # to be constructed, so that it is usable # in a non-ORM context at least. return self._subclass_manager(instance.__class__).\ - _new_state_if_none(instance) + _new_state_if_none(instance) else: state = self._state_constructor(instance, self) - setattr(instance, self.STATE_ATTR, state) + self._state_setter(instance, state) return state def has_state(self, instance): @@ -340,6 +365,7 @@ def __repr__(self): return '<%s of %r at %x>' % ( self.__class__.__name__, self.class_, id(self)) + class _SerializeManager(object): """Provide serialization of a :class:`.ClassManager`. @@ -347,6 +373,7 @@ class _SerializeManager(object): and ``__call__()`` on deserialize. """ + def __init__(self, state, d): self.class_ = state.class_ manager = state.manager @@ -356,12 +383,12 @@ def __call__(self, state, inst, state_dict): state.manager = manager = manager_of_class(self.class_) if manager is None: raise exc.UnmappedInstanceError( - inst, - "Cannot deserialize object of type %r - " - "no mapper() has " - "been configured for this class within the current " - "Python process!" % - self.class_) + inst, + "Cannot deserialize object of type %r - " + "no mapper() has " + "been configured for this class within the current " + "Python process!" 
% + self.class_) elif manager.is_mapped and not manager.mapper.configured: manager.mapper._configure_all() @@ -372,6 +399,7 @@ def __call__(self, state, inst, state_dict): manager.setup_instance(inst, state) manager.dispatch.unpickle(state, state_dict) + class InstrumentationFactory(object): """Factory for new ClassManager instances.""" @@ -423,6 +451,7 @@ def unregister(self, class_): manager_of_class = _default_manager_getter = base.manager_of_class + def register_class(class_): """Register class instrumentation. @@ -451,7 +480,7 @@ def is_instrumented(instance, key): """ return manager_of_class(instance.__class__).\ - is_instrumented(key, search=True) + is_instrumented(key, search=True) def _generate_init(class_, class_manager): diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index 1b0bf48a99..2ff00ae1c0 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -1,5 +1,6 @@ # orm/interfaces.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,24 +9,28 @@ Contains various base classes used throughout the ORM. -Defines the now deprecated ORM extension classes as well -as ORM internals. +Defines some key base classes prominent within the internals, +as well as the now-deprecated ORM extension classes. Other than the deprecated extensions, this module and the -classes within should be considered mostly private. +classes within are mostly private, though some attributes +are exposed when inspecting mappings. """ from __future__ import absolute_import -from .. import exc as sa_exc, util, inspect +from .. import util from ..sql import operators -from collections import deque -from .base import ONETOMANY, MANYTOONE, MANYTOMANY, EXT_CONTINUE, EXT_STOP, NOT_EXTENSION -from .base import _InspectionAttr, _MappedAttribute -from .path_registry import PathRegistry +from .base import (ONETOMANY, MANYTOONE, MANYTOMANY, + EXT_CONTINUE, EXT_STOP, NOT_EXTENSION) +from .base import (InspectionAttr, InspectionAttr, + InspectionAttrInfo, _MappedAttribute) import collections +from .. import inspect +# imported later +MapperExtension = SessionExtension = AttributeExtension = None __all__ = ( 'AttributeExtension', @@ -42,15 +47,11 @@ 'PropComparator', 'SessionExtension', 'StrategizedProperty', - ) - +) -class MapperProperty(_MappedAttribute, _InspectionAttr): - """Manage the relationship of a ``Mapper`` to a single class - attribute, as well as that attribute as it appears on individual - instances of the class, including attribute instrumentation, - attribute access, loading behavior, and dependency calculations. +class MapperProperty(_MappedAttribute, InspectionAttr, util.MemoizedSlots): + """Represent a particular class attribute mapped by :class:`.Mapper`. The most common occurrences of :class:`.MapperProperty` are the mapped :class:`.Column`, which is represented in a mapping as @@ -61,14 +62,51 @@ class MapperProperty(_MappedAttribute, _InspectionAttr): """ + __slots__ = ( + '_configure_started', '_configure_finished', 'parent', 'key', + 'info' + ) + cascade = frozenset() """The set of 'cascade' attribute names. This collection is checked before the 'cascade_iterator' method is called. + The collection typically only applies to a RelationshipProperty. 
+ """ is_property = True + """Part of the InspectionAttr interface; states this object is a + mapper property. + + """ + + def _memoized_attr_info(self): + """Info dictionary associated with the object, allowing user-defined + data to be associated with this :class:`.InspectionAttr`. + + The dictionary is generated when first accessed. Alternatively, + it can be specified as a constructor argument to the + :func:`.column_property`, :func:`.relationship`, or :func:`.composite` + functions. + + .. versionadded:: 0.8 Added support for .info to all + :class:`.MapperProperty` subclasses. + + .. versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also + available on extension types via the + :attr:`.InspectionAttrInfo.info` attribute, so that it can apply + to a wider variety of ORM and extension constructs. + + .. seealso:: + + :attr:`.QueryableAttribute.info` + + :attr:`.SchemaItem.info` + + """ + return {} def setup(self, context, entity, path, adapter, **kwargs): """Called by Query for the purposes of constructing a SQL statement. @@ -76,19 +114,18 @@ def setup(self, context, entity, path, adapter, **kwargs): Each MapperProperty associated with the target mapper processes the statement referenced by the query context, adding columns and/or criterion as appropriate. - """ - pass + """ def create_row_processor(self, context, path, - mapper, row, adapter): - """Return a 3-tuple consisting of three row processing functions. + mapper, result, adapter, populators): + """Produce row processing functions and append to the given + set of populators lists. """ - return None, None, None def cascade_iterator(self, type_, state, visited_instances=None, - halt_on=None): + halt_on=None): """Iterate through instances related to the given instance for a particular 'cascade', starting with this MapperProperty. @@ -97,41 +134,44 @@ def cascade_iterator(self, type_, state, visited_instances=None, Note that the 'cascade' collection on this MapperProperty is checked first for the given type before cascade_iterator is called. - See PropertyLoader for the related instance implementation. + This method typically only applies to RelationshipProperty. + """ return iter(()) def set_parent(self, parent, init): - self.parent = parent - - def instrument_class(self, mapper): # pragma: no-coverage - raise NotImplementedError() + """Set the parent mapper that references this MapperProperty. - @util.memoized_property - def info(self): - """Info dictionary associated with the object, allowing user-defined - data to be associated with this :class:`.MapperProperty`. + This method is overridden by some subclasses to perform extra + setup when the mapper is first known. - The dictionary is generated when first accessed. Alternatively, - it can be specified as a constructor argument to the - :func:`.column_property`, :func:`.relationship`, or :func:`.composite` - functions. + """ + self.parent = parent - .. versionadded:: 0.8 Added support for .info to all - :class:`.MapperProperty` subclasses. + def instrument_class(self, mapper): + """Hook called by the Mapper to the property to initiate + instrumentation of the class attribute managed by this + MapperProperty. - .. seealso:: + The MapperProperty here will typically call out to the + attributes module to set up an InstrumentedAttribute. - :attr:`.QueryableAttribute.info` + This step is the first of two steps to set up an InstrumentedAttribute, + and is called early in the mapper setup process. 
- :attr:`.SchemaItem.info` + The second step is typically the init_class_attribute step, + called from StrategizedProperty via the post_instrument_class() + hook. This step assigns additional state to the InstrumentedAttribute + (specifically the "impl") which has been determined after the + MapperProperty has determined what kind of persistence + management it needs to do (e.g. scalar, object, collection, etc). """ - return {} - _configure_started = False - _configure_finished = False + def __init__(self): + self._configure_started = False + self._configure_finished = False def init(self): """Called after all mappers are created to assemble @@ -178,53 +218,36 @@ def do_init(self): """ - pass - def post_instrument_class(self, mapper): """Perform instrumentation adjustments that need to occur after init() has completed. - """ - pass + The given Mapper is the Mapper invoking the operation, which + may not be the same Mapper as self.parent in an inheritance + scenario; however, Mapper will always at least be a sub-mapper of + self.parent. - def is_primary(self): - """Return True if this ``MapperProperty``'s mapper is the - primary mapper for its class. + This method is typically used by StrategizedProperty, which delegates + it to LoaderStrategy.init_class_attribute() to perform final setup + on the class-bound InstrumentedAttribute. - This flag is used to indicate that the ``MapperProperty`` can - define attribute instrumentation for the class at the class - level (as opposed to the individual instance level). """ - return not self.parent.non_primary - def merge(self, session, source_state, source_dict, dest_state, - dest_dict, load, _recursive): + dest_dict, load, _recursive): """Merge the attribute represented by this ``MapperProperty`` - from source to destination object""" - - pass - - def compare(self, operator, value, **kw): - """Return a compare operation for the columns represented by - this ``MapperProperty`` to the given value, which may be a - column value or an instance. 'operator' is an operator from - the operators module, or from sql.Comparator. + from source to destination object. - By default uses the PropComparator attached to this MapperProperty - under the attribute name "comparator". """ - return operator(self.comparator, value) - def __repr__(self): return '<%s at 0x%x; %s>' % ( self.__class__.__name__, id(self), getattr(self, 'key', 'no key')) + class PropComparator(operators.ColumnOperators): - """Defines boolean, comparison, and other operators for - :class:`.MapperProperty` objects. + """Defines SQL operators for :class:`.MapperProperty` objects. SQLAlchemy allows for operators to be redefined at both the Core and ORM level. :class:`.PropComparator` @@ -311,9 +334,11 @@ class SomeMappedClass(Base): """ + __slots__ = 'prop', 'property', '_parententity', '_adapt_to_entity' + def __init__(self, prop, parentmapper, adapt_to_entity=None): self.prop = self.property = prop - self._parentmapper = parentmapper + self._parententity = adapt_to_entity or parentmapper self._adapt_to_entity = adapt_to_entity def __clause_element__(self): @@ -326,7 +351,13 @@ def adapt_to_entity(self, adapt_to_entity): """Return a copy of this PropComparator which will use the given :class:`.AliasedInsp` to produce corresponding expressions. 
""" - return self.__class__(self.prop, self._parentmapper, adapt_to_entity) + return self.__class__(self.prop, self._parententity, adapt_to_entity) + + @property + def _parentmapper(self): + """legacy; this is renamed to _parententity to be + compatible with QueryableAttribute.""" + return inspect(self._parententity).mapper @property def adapter(self): @@ -339,7 +370,7 @@ def adapter(self): else: return self._adapt_to_entity._adapt_element - @util.memoized_property + @property def info(self): return self.property.info @@ -419,8 +450,17 @@ class StrategizedProperty(MapperProperty): strategies can be selected at Query time through the usage of ``StrategizedOption`` objects via the Query.options() method. + The mechanics of StrategizedProperty are used for every Query + invocation for every mapped attribute participating in that Query, + to determine first how the attribute will be rendered in SQL + and secondly how the attribute will retrieve a value from a result + row and apply it to a mapped object. The routines here are very + performance-critical. + """ + __slots__ = '_strategies', 'strategy' + strategy_wildcard_key = None def _get_context_loader(self, context, path): @@ -433,10 +473,10 @@ def _get_context_loader(self, context, path): # search among: exact match, "attr.*", "default" strategy # if any. for path_key in ( - search_path._loader_key, - search_path._wildcard_path_loader_key, - search_path._default_path_loader_key - ): + search_path._loader_key, + search_path._wildcard_path_loader_key, + search_path._default_path_loader_key + ): if path_key in context.attributes: load = context.attributes[path_key] break @@ -448,13 +488,15 @@ def _get_strategy(self, key): return self._strategies[key] except KeyError: cls = self._strategy_lookup(*key) - self._strategies[key] = self._strategies[cls] = strategy = cls(self) + self._strategies[key] = self._strategies[ + cls] = strategy = cls(self) return strategy def _get_strategy_by_cls(self, cls): return self._get_strategy(cls._strategy_keys[0]) - def setup(self, context, entity, path, adapter, **kwargs): + def setup( + self, context, entity, path, adapter, **kwargs): loader = self._get_context_loader(context, path) if loader and loader.strategy: strat = self._get_strategy(loader.strategy) @@ -462,33 +504,38 @@ def setup(self, context, entity, path, adapter, **kwargs): strat = self.strategy strat.setup_query(context, entity, path, loader, adapter, **kwargs) - def create_row_processor(self, context, path, mapper, row, adapter): + def create_row_processor( + self, context, path, mapper, + result, adapter, populators): loader = self._get_context_loader(context, path) if loader and loader.strategy: strat = self._get_strategy(loader.strategy) else: strat = self.strategy - return strat.create_row_processor(context, path, loader, - mapper, row, adapter) + strat.create_row_processor( + context, path, loader, + mapper, result, adapter, populators) def do_init(self): self._strategies = {} self.strategy = self._get_strategy_by_cls(self.strategy_class) def post_instrument_class(self, mapper): - if self.is_primary() and \ - not mapper.class_manager._attr_has_impl(self.key): + if not self.parent.non_primary and \ + not mapper.class_manager._attr_has_impl(self.key): self.strategy.init_class_attribute(mapper) - - _strategies = collections.defaultdict(dict) + _all_strategies = collections.defaultdict(dict) @classmethod def strategy_for(cls, **kw): def decorate(dec_cls): - dec_cls._strategy_keys = [] + # ensure each subclass of the strategy has its + # own 
_strategy_keys collection + if '_strategy_keys' not in dec_cls.__dict__: + dec_cls._strategy_keys = [] key = tuple(sorted(kw.items())) - cls._strategies[cls][key] = dec_cls + cls._all_strategies[cls][key] = dec_cls dec_cls._strategy_keys.append(key) return dec_cls return decorate @@ -496,8 +543,8 @@ def decorate(dec_cls): @classmethod def _strategy_lookup(cls, *key): for prop_cls in cls.__mro__: - if prop_cls in cls._strategies: - strategies = cls._strategies[prop_cls] + if prop_cls in cls._all_strategies: + strategies = cls._all_strategies[prop_cls] try: return strategies[key] except KeyError: @@ -510,22 +557,26 @@ class MapperOption(object): propagate_to_loaders = False """if True, indicate this option should be carried along - Query object generated by scalar or object lazy loaders. + to "secondary" Query objects produced during lazy loads + or refresh operations. + """ def process_query(self, query): - pass + """Apply a modification to the given :class:`.Query`.""" def process_query_conditionally(self, query): """same as process_query(), except that this option may not apply to the given query. - Used when secondary loaders resend existing options to a new - Query.""" - - self.process_query(query) + This is typically used during a lazy load or scalar refresh + operation to propagate options stated in the original Query to the + new Query being used for the load. It occurs for those options that + specify propagate_to_loaders=True. + """ + self.process_query(query) class LoaderStrategy(object): @@ -542,15 +593,18 @@ class LoaderStrategy(object): * it processes the ``QueryContext`` at statement construction time, where it can modify the SQL statement that is being produced. - Simple column attributes may add their represented column to the - list of selected columns, *eager loading* properties may add - ``LEFT OUTER JOIN`` clauses to the statement. + For example, simple column attributes will add their represented + column to the list of selected columns, a joined eager loader + may establish join clauses to add to the statement. * It produces "row processor" functions at result fetching time. These "row processor" functions populate a particular attribute on a particular mapped instance. """ + + __slots__ = 'parent_property', 'is_class_level', 'parent', 'key' + def __init__(self, parent): self.parent_property = parent self.is_class_level = False @@ -561,17 +615,26 @@ def init_class_attribute(self, mapper): pass def setup_query(self, context, entity, path, loadopt, adapter, **kwargs): - pass + """Establish column and other state for a given QueryContext. + + This method fulfills the contract specified by MapperProperty.setup(). + + StrategizedProperty delegates its setup() method + directly to this method. + + """ def create_row_processor(self, context, path, loadopt, mapper, - row, adapter): - """Return row processing functions which fulfill the contract - specified by MapperProperty.create_row_processor. + result, adapter, populators): + """Establish row processing functions for a given QueryContext. - StrategizedProperty delegates its create_row_processor method - directly to this method. """ + This method fulfills the contract specified by + MapperProperty.create_row_processor(). - return None, None, None + StrategizedProperty delegates its create_row_processor() method + directly to this method. 
+ + """ def __str__(self): return str(self.parent_property) diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py index b79ea429c6..6f73456a47 100644 --- a/lib/sqlalchemy/orm/loading.py +++ b/lib/sqlalchemy/orm/loading.py @@ -1,5 +1,6 @@ # orm/loading.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -11,30 +12,31 @@ as well as some of the attribute loading strategies. """ - +from __future__ import absolute_import from .. import util -from . import attributes, exc as orm_exc, state as statelib -from .interfaces import EXT_CONTINUE +from . import attributes, exc as orm_exc from ..sql import util as sql_util +from . import strategy_options + from .util import _none_set, state_str +from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE from .. import exc as sa_exc +import collections _new_runid = util.counter() def instances(query, cursor, context): """Return an ORM result as an iterator.""" - session = query.session context.runid = _new_runid() - filter_fns = [ent.filter_fn - for ent in query._entities] + filter_fns = [ent.filter_fn for ent in query._entities] filtered = id in filter_fns single_entity = len(query._entities) == 1 and \ - query._entities[0].supports_single_entity + query._entities[0].supports_single_entity if filtered: if single_entity: @@ -43,59 +45,45 @@ def instances(query, cursor, context): def filter_fn(row): return tuple(fn(x) for x, fn in zip(row, filter_fns)) - custom_rows = single_entity and \ - query._entities[0].custom_rows - - (process, labels) = \ - list(zip(*[ - query_entity.row_processor(query, - context, custom_rows) - for query_entity in query._entities - ])) - - while True: - context.progress = {} - context.partials = {} - - if query._yield_per: - fetch = cursor.fetchmany(query._yield_per) - if not fetch: - break - else: - fetch = cursor.fetchall() - - if custom_rows: - rows = [] - for row in fetch: - process[0](row, rows) - elif single_entity: - rows = [process[0](row, None) for row in fetch] - else: - rows = [util.KeyedTuple([proc(row, None) for proc in process], - labels) for row in fetch] - - if filtered: - rows = util.unique_list(rows, filter_fn) - - if context.refresh_state and query._only_load_props \ - and context.refresh_state in context.progress: - context.refresh_state._commit( - context.refresh_state.dict, query._only_load_props) - context.progress.pop(context.refresh_state) + try: + (process, labels) = \ + list(zip(*[ + query_entity.row_processor(query, + context, cursor) + for query_entity in query._entities + ])) + + if not single_entity: + keyed_tuple = util.lightweight_named_tuple('result', labels) + + while True: + context.partials = {} + + if query._yield_per: + fetch = cursor.fetchmany(query._yield_per) + if not fetch: + break + else: + fetch = cursor.fetchall() - statelib.InstanceState._commit_all_states( - list(context.progress.items()), - session.identity_map - ) + if single_entity: + proc = process[0] + rows = [proc(row) for row in fetch] + else: + rows = [keyed_tuple([proc(row) for proc in process]) + for row in fetch] - for state, (dict_, attrs) in context.partials.items(): - state._commit(dict_, attrs) + if filtered: + rows = util.unique_list(rows, filter_fn) - for row in rows: - yield row + for row in rows: + yield row - if not query._yield_per: - break + if not query._yield_per: + break + 
except Exception as err: + cursor.close() + util.raise_from_cause(err) @util.dependencies("sqlalchemy.orm.query") @@ -114,26 +102,27 @@ def merge_result(querylib, query, iterator, load=True): if single_entity: if isinstance(query._entities[0], querylib._MapperEntity): result = [session._merge( - attributes.instance_state(instance), - attributes.instance_dict(instance), - load=load, _recursive={}) - for instance in iterator] + attributes.instance_state(instance), + attributes.instance_dict(instance), + load=load, _recursive={}) + for instance in iterator] else: result = list(iterator) else: mapped_entities = [i for i, e in enumerate(query._entities) - if isinstance(e, querylib._MapperEntity)] + if isinstance(e, querylib._MapperEntity)] result = [] keys = [ent._label_name for ent in query._entities] + keyed_tuple = util.lightweight_named_tuple('result', keys) for row in iterator: newrow = list(row) for i in mapped_entities: if newrow[i] is not None: newrow[i] = session._merge( - attributes.instance_state(newrow[i]), - attributes.instance_dict(newrow[i]), - load=load, _recursive={}) - result.append(util.KeyedTuple(newrow, keys)) + attributes.instance_state(newrow[i]), + attributes.instance_dict(newrow[i]), + load=load, _recursive={}) + result.append(keyed_tuple(newrow)) return iter(result) finally: @@ -160,7 +149,7 @@ def get_from_identity(session, key, passive): # expired state will be checked soon enough, if necessary return instance try: - state(state, passive) + state._load_expired(state, passive) except orm_exc.ObjectDeletedError: session._remove_newly_deleted([state]) return None @@ -170,8 +159,8 @@ def get_from_identity(session, key, passive): def load_on_ident(query, key, - refresh_state=None, lockmode=None, - only_load_props=None): + refresh_state=None, lockmode=None, + only_load_props=None): """Load the given identity key from the database.""" if key is not None: @@ -195,10 +184,10 @@ def load_on_ident(query, key, if None in ident: nones = set([ _get_params[col].key for col, value in - zip(mapper.primary_key, ident) if value is None + zip(mapper.primary_key, ident) if value is None ]) _get_clause = sql_util.adapt_criterion_to_null( - _get_clause, nones) + _get_clause, nones) _get_clause = q._adapt_clause(_get_clause, True, False) q._criterion = _get_clause @@ -232,12 +221,56 @@ def load_on_ident(query, key, return None -def instance_processor(mapper, context, path, adapter, - polymorphic_from=None, - only_load_props=None, - refresh_state=None, - polymorphic_discriminator=None): +def _setup_entity_query( + context, mapper, query_entity, + path, adapter, column_collection, + with_polymorphic=None, only_load_props=None, + polymorphic_discriminator=None, **kw): + + if with_polymorphic: + poly_properties = mapper._iterate_polymorphic_properties( + with_polymorphic) + else: + poly_properties = mapper._polymorphic_properties + + quick_populators = {} + + path.set( + context.attributes, + "memoized_setups", + quick_populators) + + for value in poly_properties: + if only_load_props and \ + value.key not in only_load_props: + continue + value.setup( + context, + query_entity, + path, + adapter, + only_load_props=only_load_props, + column_collection=column_collection, + memoized_populators=quick_populators, + **kw + ) + + if polymorphic_discriminator is not None and \ + polymorphic_discriminator \ + is not mapper.polymorphic_on: + + if adapter: + pd = adapter.columns[polymorphic_discriminator] + else: + pd = polymorphic_discriminator + column_collection.append(pd) + +def _instance_processor( + 
mapper, context, result, path, adapter, + only_load_props=None, refresh_state=None, + polymorphic_discriminator=None, + _polymorphic_from=None): """Produce a mapper level row processor callable which processes rows into mapped instances.""" @@ -249,329 +282,353 @@ def instance_processor(mapper, context, path, adapter, pk_cols = mapper.primary_key - if polymorphic_from or refresh_state: - polymorphic_on = None - else: - if polymorphic_discriminator is not None: - polymorphic_on = polymorphic_discriminator - else: - polymorphic_on = mapper.polymorphic_on - polymorphic_instances = util.PopulateDict( - _configure_subclass_mapper( - mapper, - context, path, adapter) - ) - - version_id_col = mapper.version_id_col - if adapter: pk_cols = [adapter.columns[c] for c in pk_cols] - if polymorphic_on is not None: - polymorphic_on = adapter.columns[polymorphic_on] - if version_id_col is not None: - version_id_col = adapter.columns[version_id_col] identity_class = mapper._identity_class - new_populators = [] - existing_populators = [] - eager_populators = [] - - load_path = context.query._current_path + path \ - if context.query._current_path.path \ - else path - - def populate_state(state, dict_, row, isnew, only_load_props): - if isnew: - if context.propagate_options: - state.load_options = context.propagate_options - if state.load_options: - state.load_path = load_path - - if not new_populators: - _populators(mapper, context, path, row, adapter, - new_populators, - existing_populators, - eager_populators - ) - - if isnew: - populators = new_populators + populators = collections.defaultdict(list) + + props = mapper._prop_set + if only_load_props is not None: + props = props.intersection( + mapper._props[k] for k in only_load_props) + + quick_populators = path.get( + context.attributes, "memoized_setups", _none_set) + + for prop in props: + if prop in quick_populators: + # this is an inlined path just for column-based attributes. + col = quick_populators[prop] + if col is _DEFER_FOR_STATE: + populators["new"].append( + (prop.key, prop._deferred_column_loader)) + elif col is _SET_DEFERRED_EXPIRED: + # note that in this path, we are no longer + # searching in the result to see if the column might + # be present in some unexpected way. 
+ populators["expire"].append((prop.key, False)) + else: + if adapter: + col = adapter.columns[col] + getter = result._getter(col, False) + if getter: + populators["quick"].append((prop.key, getter)) + else: + # fall back to the ColumnProperty itself, which + # will iterate through all of its columns + # to see if one fits + prop.create_row_processor( + context, path, mapper, result, adapter, populators) else: - populators = existing_populators + prop.create_row_processor( + context, path, mapper, result, adapter, populators) - if only_load_props is None: - for key, populator in populators: - populator(state, dict_, row) - elif only_load_props: - for key, populator in populators: - if key in only_load_props: - populator(state, dict_, row) + propagate_options = context.propagate_options + if propagate_options: + load_path = context.query._current_path + path \ + if context.query._current_path.path else path session_identity_map = context.session.identity_map - listeners = mapper.dispatch - - translate_row = listeners.translate_row or None - create_instance = listeners.create_instance or None - populate_instance = listeners.populate_instance or None - append_result = listeners.append_result or None populate_existing = context.populate_existing or mapper.always_refresh - invoke_all_eagers = context.invoke_all_eagers + load_evt = bool(mapper.class_manager.dispatch.load) + refresh_evt = bool(mapper.class_manager.dispatch.refresh) + instance_state = attributes.instance_state + instance_dict = attributes.instance_dict + session_id = context.session.hash_key + version_check = context.version_check + runid = context.runid + + if refresh_state: + refresh_identity_key = refresh_state.key + if refresh_identity_key is None: + # super-rare condition; a refresh is being called + # on a non-instance-key instance; this is meant to only + # occur within a flush() + refresh_identity_key = \ + mapper._identity_key_from_state(refresh_state) + else: + refresh_identity_key = None if mapper.allow_partial_pks: is_not_primary_key = _none_set.issuperset else: - is_not_primary_key = _none_set.issubset - - def _instance(row, result): - if not new_populators and invoke_all_eagers: - _populators(mapper, context, path, row, adapter, - new_populators, - existing_populators, - eager_populators - ) + is_not_primary_key = _none_set.intersection - if translate_row: - for fn in translate_row: - ret = fn(mapper, context, row) - if ret is not EXT_CONTINUE: - row = ret - break + def _instance(row): - if polymorphic_on is not None: - discriminator = row[polymorphic_on] - if discriminator is not None: - _instance = polymorphic_instances[discriminator] - if _instance: - return _instance(row, result) - - # determine identity key - if refresh_state: - identitykey = refresh_state.key - if identitykey is None: - # super-rare condition; a refresh is being called - # on a non-instance-key instance; this is meant to only - # occur within a flush() - identitykey = mapper._identity_key_from_state(refresh_state) + # determine the state that we'll be populating + if refresh_identity_key: + # fixed state that we're refreshing + state = refresh_state + instance = state.obj() + dict_ = instance_dict(instance) + isnew = state.runid != runid + currentload = True + loaded_instance = False else: + # look at the row, see if that identity is in the + # session, or we have to create a new one identitykey = ( - identity_class, - tuple([row[column] for column in pk_cols]) - ) + identity_class, + tuple([row[column] for column in pk_cols]) + ) - instance = 
session_identity_map.get(identitykey) + instance = session_identity_map.get(identitykey) - if instance is not None: - state = attributes.instance_state(instance) - dict_ = attributes.instance_dict(instance) + if instance is not None: + # existing instance + state = instance_state(instance) + dict_ = instance_dict(instance) - isnew = state.runid != context.runid - currentload = not isnew - loaded_instance = False + isnew = state.runid != runid + currentload = not isnew + loaded_instance = False - if not currentload and \ - version_id_col is not None and \ - context.version_check and \ - mapper._get_state_attr_by_column( - state, - dict_, - mapper.version_id_col) != \ - row[version_id_col]: - - raise orm_exc.StaleDataError( - "Instance '%s' has version id '%s' which " - "does not match database-loaded version id '%s'." - % (state_str(state), - mapper._get_state_attr_by_column( - state, dict_, - mapper.version_id_col), - row[version_id_col])) - elif refresh_state: - # out of band refresh_state detected (i.e. its not in the - # session.identity_map) honor it anyway. this can happen - # if a _get() occurs within save_obj(), such as - # when eager_defaults is True. - state = refresh_state - instance = state.obj() - dict_ = attributes.instance_dict(instance) - isnew = state.runid != context.runid - currentload = True - loaded_instance = False - else: - # check for non-NULL values in the primary key columns, - # else no entity is returned for the row - if is_not_primary_key(identitykey[1]): - return None + if version_check and not currentload: + _validate_version_id(mapper, state, dict_, row, adapter) - isnew = True - currentload = True - loaded_instance = True - - if create_instance: - for fn in create_instance: - instance = fn(mapper, context, - row, mapper.class_) - if instance is not EXT_CONTINUE: - manager = attributes.manager_of_class( - instance.__class__) - # TODO: if manager is None, raise a friendly error - # about returning instances of unmapped types - manager.setup_instance(instance) - break - else: - instance = mapper.class_manager.new_instance() else: + # create a new instance + + # check for non-NULL values in the primary key columns, + # else no entity is returned for the row + if is_not_primary_key(identitykey[1]): + return None + + isnew = True + currentload = True + loaded_instance = True + instance = mapper.class_manager.new_instance() - dict_ = attributes.instance_dict(instance) - state = attributes.instance_state(instance) - state.key = identitykey + dict_ = instance_dict(instance) + state = instance_state(instance) + state.key = identitykey - # attach instance to session. - state.session_id = context.session.hash_key - session_identity_map.add(state) + # attach instance to session. + state.session_id = session_id + session_identity_map._add_unpresent(state, identitykey) + # populate. this looks at whether this state is new + # for this load or was existing, and whether or not this + # row is the first row with this identity. if currentload or populate_existing: - # state is being fully loaded, so populate. - # add to the "context.progress" collection. 
- if isnew: - state.runid = context.runid - context.progress[state] = dict_ - - if populate_instance: - for fn in populate_instance: - ret = fn(mapper, context, row, state, - only_load_props=only_load_props, - instancekey=identitykey, isnew=isnew) - if ret is not EXT_CONTINUE: - break - else: - populate_state(state, dict_, row, isnew, only_load_props) - else: - populate_state(state, dict_, row, isnew, only_load_props) - - if loaded_instance: - state.manager.dispatch.load(state, context) - elif isnew: - state.manager.dispatch.refresh(state, context, only_load_props) - - elif state in context.partials or state.unloaded or eager_populators: - # state is having a partial set of its attributes - # refreshed. Populate those attributes, - # and add to the "context.partials" collection. - if state in context.partials: - isnew = False - (d_, attrs) = context.partials[state] - else: - isnew = True - attrs = state.unloaded - context.partials[state] = (dict_, attrs) - - if populate_instance: - for fn in populate_instance: - ret = fn(mapper, context, row, state, - only_load_props=attrs, - instancekey=identitykey, isnew=isnew) - if ret is not EXT_CONTINUE: - break - else: - populate_state(state, dict_, row, isnew, attrs) - else: - populate_state(state, dict_, row, isnew, attrs) + # full population routines. Objects here are either + # just created, or we are doing a populate_existing + + if isnew and propagate_options: + state.load_options = propagate_options + state.load_path = load_path - for key, pop in eager_populators: - if key not in state.unloaded: - pop(state, dict_, row) + _populate_full( + context, row, state, dict_, isnew, + loaded_instance, populate_existing, populators) if isnew: - state.manager.dispatch.refresh(state, context, attrs) - - if result is not None: - if append_result: - for fn in append_result: - if fn(mapper, context, row, state, - result, instancekey=identitykey, - isnew=isnew) is not EXT_CONTINUE: - break - else: - result.append(instance) - else: - result.append(instance) + if loaded_instance and load_evt: + state.manager.dispatch.load(state, context) + elif refresh_evt: + state.manager.dispatch.refresh( + state, context, only_load_props) + + if populate_existing or state.modified: + if refresh_state and only_load_props: + state._commit(dict_, only_load_props) + else: + state._commit_all(dict_, session_identity_map) + + else: + # partial population routines, for objects that were already + # in the Session, but a row matches them; apply eager loaders + # on existing objects, etc. + unloaded = state.unloaded + isnew = state not in context.partials + + if not isnew or unloaded or populators["eager"]: + # state is having a partial set of its attributes + # refreshed. Populate those attributes, + # and add to the "context.partials" collection. 
+ + to_load = _populate_partial( + context, row, state, dict_, isnew, + unloaded, populators) + + if isnew: + if refresh_evt: + state.manager.dispatch.refresh( + state, context, to_load) + + state._commit(dict_, to_load) return instance + + if mapper.polymorphic_map and not _polymorphic_from and not refresh_state: + # if we are doing polymorphic, dispatch to a different _instance() + # method specific to the subclass mapper + _instance = _decorate_polymorphic_switch( + _instance, context, mapper, result, path, + polymorphic_discriminator, adapter) + return _instance -def _populators(mapper, context, path, row, adapter, - new_populators, existing_populators, eager_populators): - """Produce a collection of attribute level row processor - callables.""" +def _populate_full( + context, row, state, dict_, isnew, + loaded_instance, populate_existing, populators): + if isnew: + # first time we are seeing a row with this identity. + state.runid = context.runid + + for key, getter in populators["quick"]: + dict_[key] = getter(row) + if populate_existing: + for key, set_callable in populators["expire"]: + dict_.pop(key, None) + if set_callable: + state.expired_attributes.add(key) + else: + for key, set_callable in populators["expire"]: + if set_callable: + state.expired_attributes.add(key) + for key, populator in populators["new"]: + populator(state, dict_, row) + for key, populator in populators["delayed"]: + populator(state, dict_, row) + else: + # have already seen rows with this identity. + for key, populator in populators["existing"]: + populator(state, dict_, row) + + +def _populate_partial( + context, row, state, dict_, isnew, + unloaded, populators): + if not isnew: + to_load = context.partials[state] + for key, populator in populators["existing"]: + if key in to_load: + populator(state, dict_, row) + else: + to_load = unloaded + context.partials[state] = to_load + + for key, getter in populators["quick"]: + if key in to_load: + dict_[key] = getter(row) + for key, set_callable in populators["expire"]: + if key in to_load: + dict_.pop(key, None) + if set_callable: + state.expired_attributes.add(key) + for key, populator in populators["new"]: + if key in to_load: + populator(state, dict_, row) + for key, populator in populators["delayed"]: + if key in to_load: + populator(state, dict_, row) + for key, populator in populators["eager"]: + if key not in unloaded: + populator(state, dict_, row) + + return to_load - delayed_populators = [] - pops = (new_populators, existing_populators, delayed_populators, - eager_populators) - for prop in mapper._props.values(): +def _validate_version_id(mapper, state, dict_, row, adapter): - for i, pop in enumerate(prop.create_row_processor( - context, - path, - mapper, row, adapter)): - if pop is not None: - pops[i].append((prop.key, pop)) + version_id_col = mapper.version_id_col - if delayed_populators: - new_populators.extend(delayed_populators) + if version_id_col is None: + return + if adapter: + version_id_col = adapter.columns[version_id_col] + + if mapper._get_state_attr_by_column( + state, dict_, mapper.version_id_col) != row[version_id_col]: + raise orm_exc.StaleDataError( + "Instance '%s' has version id '%s' which " + "does not match database-loaded version id '%s'." 
+ % (state_str(state), mapper._get_state_attr_by_column( + state, dict_, mapper.version_id_col), + row[version_id_col])) + + +def _decorate_polymorphic_switch( + instance_fn, context, mapper, result, path, + polymorphic_discriminator, adapter): + if polymorphic_discriminator is not None: + polymorphic_on = polymorphic_discriminator + else: + polymorphic_on = mapper.polymorphic_on + if polymorphic_on is None: + return instance_fn -def _configure_subclass_mapper(mapper, context, path, adapter): - """Produce a mapper level row processor callable factory for mappers - inheriting this one.""" + if adapter: + polymorphic_on = adapter.columns[polymorphic_on] def configure_subclass_mapper(discriminator): try: sub_mapper = mapper.polymorphic_map[discriminator] except KeyError: raise AssertionError( - "No such polymorphic_identity %r is defined" % - discriminator) - if sub_mapper is mapper: - return None + "No such polymorphic_identity %r is defined" % + discriminator) + else: + if sub_mapper is mapper: + return None + + return _instance_processor( + sub_mapper, context, result, + path, adapter, _polymorphic_from=mapper) - return instance_processor( - sub_mapper, - context, - path, - adapter, - polymorphic_from=mapper) - return configure_subclass_mapper + polymorphic_instances = util.PopulateDict( + configure_subclass_mapper + ) + + def polymorphic_instance(row): + discriminator = row[polymorphic_on] + if discriminator is not None: + _instance = polymorphic_instances[discriminator] + if _instance: + return _instance(row) + return instance_fn(row) + return polymorphic_instance def load_scalar_attributes(mapper, state, attribute_names): """initiate a column-based attribute refresh operation.""" - #assert mapper is _state_mapper(state) + # assert mapper is _state_mapper(state) session = state.session if not session: raise orm_exc.DetachedInstanceError( - "Instance %s is not bound to a Session; " - "attribute refresh operation cannot proceed" % - (state_str(state))) + "Instance %s is not bound to a Session; " + "attribute refresh operation cannot proceed" % + (state_str(state))) has_key = bool(state.key) result = False if mapper.inherits and not mapper.concrete: + # because we are using Core to produce a select() that we + # pass to the Query, we aren't calling setup() for mapped + # attributes; in 1.0 this means deferred attrs won't get loaded + # by default statement = mapper._optimized_get_statement(state, attribute_names) if statement is not None: result = load_on_ident( - session.query(mapper).from_statement(statement), - None, - only_load_props=attribute_names, - refresh_state=state - ) + session.query(mapper). + options( + strategy_options.Load(mapper).undefer("*") + ).from_statement(statement), + None, + only_load_props=attribute_names, + refresh_state=state + ) if result is False: if has_key: @@ -585,25 +642,26 @@ def load_scalar_attributes(mapper, state, attribute_names): for col in mapper.primary_key] if state.expired_attributes.intersection(pk_attrs): raise sa_exc.InvalidRequestError( - "Instance %s cannot be refreshed - it's not " - " persistent and does not " - "contain a full primary key." % state_str(state)) + "Instance %s cannot be refreshed - it's not " + " persistent and does not " + "contain a full primary key." 
% state_str(state)) identity_key = mapper._identity_key_from_state(state) - if (_none_set.issubset(identity_key) and \ + if (_none_set.issubset(identity_key) and not mapper.allow_partial_pks) or \ _none_set.issuperset(identity_key): - util.warn("Instance %s to be refreshed doesn't " - "contain a full primary key - can't be refreshed " - "(and shouldn't be expired, either)." - % state_str(state)) + util.warn_limited( + "Instance %s to be refreshed doesn't " + "contain a full primary key - can't be refreshed " + "(and shouldn't be expired, either).", + state_str(state)) return result = load_on_ident( - session.query(mapper), - identity_key, - refresh_state=state, - only_load_props=attribute_names) + session.query(mapper), + identity_key, + refresh_state=state, + only_load_props=attribute_names) # if instance is pending, a refresh operation # may not complete (even if PK attributes are assigned) diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index a939cb9c76..2a1b9e6190 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -1,5 +1,6 @@ # orm/mapper.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -24,10 +25,11 @@ from ..sql import expression, visitors, operators, util as sql_util from . import instrumentation, attributes, exc as orm_exc, loading from . import properties -from .interfaces import MapperProperty, _InspectionAttr, _MappedAttribute +from . import util as orm_util +from .interfaces import MapperProperty, InspectionAttr, _MappedAttribute from .base import _class_to_mapper, _state_mapper, class_mapper, \ - state_str, _INSTRUMENTOR + state_str, _INSTRUMENTOR from .path_registry import PathRegistry import sys @@ -50,7 +52,7 @@ @inspection._self_inspects @log.class_logger -class Mapper(_InspectionAttr): +class Mapper(InspectionAttr): """Define the correlation of class attributes to database table columns. @@ -210,15 +212,16 @@ class will overwrite all data within object instances that already See the section :ref:`concrete_inheritance` for an example. :param confirm_deleted_rows: defaults to True; when a DELETE occurs - of one more more rows based on specific primary keys, a warning is + of one more rows based on specific primary keys, a warning is emitted when the number of rows matched does not equal the number - of rows expected. This parameter may be set to False to handle the case - where database ON DELETE CASCADE rules may be deleting some of those - rows automatically. The warning may be changed to an exception - in a future release. + of rows expected. This parameter may be set to False to handle the + case where database ON DELETE CASCADE rules may be deleting some of + those rows automatically. The warning may be changed to an + exception in a future release. - .. versionadded:: 0.9.4 - added :paramref:`.mapper.confirm_deleted_rows` - as well as conditional matched row checking on delete. + .. versionadded:: 0.9.4 - added + :paramref:`.mapper.confirm_deleted_rows` as well as conditional + matched row checking on delete. 
:param eager_defaults: if True, the ORM will immediately fetch the value of server-generated default values after an INSERT or UPDATE, @@ -228,8 +231,8 @@ class will overwrite all data within object instances that already this scheme will emit an individual ``SELECT`` statement per row inserted or updated, which note can add significant performance overhead. However, if the - target database supports :term:`RETURNING`, the default values will be - returned inline with the INSERT or UPDATE statement, which can + target database supports :term:`RETURNING`, the default values will + be returned inline with the INSERT or UPDATE statement, which can greatly enhance performance for an application that needs frequent access to just-generated server defaults. @@ -267,10 +270,10 @@ class will overwrite all data within object instances that already define how the two tables are joined; defaults to a natural join between the two tables. - :param inherit_foreign_keys: When ``inherit_condition`` is used and the - columns present are missing a :class:`.ForeignKey` configuration, - this parameter can be used to specify which columns are "foreign". - In most cases can be left as ``None``. + :param inherit_foreign_keys: When ``inherit_condition`` is used and + the columns present are missing a :class:`.ForeignKey` + configuration, this parameter can be used to specify which columns + are "foreign". In most cases can be left as ``None``. :param legacy_is_orphan: Boolean, defaults to ``False``. When ``True``, specifies that "legacy" orphan consideration @@ -278,12 +281,12 @@ class will overwrite all data within object instances that already that a pending (that is, not persistent) object is auto-expunged from an owning :class:`.Session` only when it is de-associated from *all* parents that specify a ``delete-orphan`` cascade towards - this mapper. The new default behavior is that the object is auto-expunged - when it is de-associated with *any* of its parents that specify - ``delete-orphan`` cascade. This behavior is more consistent with - that of a persistent object, and allows behavior to be consistent - in more scenarios independently of whether or not an orphanable - object has been flushed yet or not. + this mapper. The new default behavior is that the object is + auto-expunged when it is de-associated with *any* of its parents + that specify ``delete-orphan`` cascade. This behavior is more + consistent with that of a persistent object, and allows behavior to + be consistent in more scenarios independently of whether or not an + orphanable object has been flushed yet or not. See the change note and example at :ref:`legacy_is_orphan_addition` for more detail on this change. @@ -294,9 +297,9 @@ class will overwrite all data within object instances that already is expunged from the :class:`.Session` as soon as it is de-associated from any of its orphan-enabled parents. Previously, the pending object would be expunged only if de-associated - from all of its orphan-enabled parents. The new flag ``legacy_is_orphan`` - is added to :func:`.orm.mapper` which re-establishes the - legacy behavior. + from all of its orphan-enabled parents. The new flag + ``legacy_is_orphan`` is added to :func:`.orm.mapper` which + re-establishes the legacy behavior. :param non_primary: Specify that this :class:`.Mapper` is in addition to the "primary" mapper, that is, the one used for persistence. @@ -423,6 +426,12 @@ def set_identity(instance, *arg, **kw): thus persisting the value to the ``discriminator`` column in the database. 
+ .. warning:: + + Currently, **only one discriminator column may be set**, typically + on the base-most class in the hierarchy. "Cascading" polymorphic + columns are not yet supported. + .. seealso:: :ref:`inheritance_toplevel` @@ -445,8 +454,8 @@ def set_identity(instance, *arg, **kw): based on all those :class:`.MapperProperty` instances declared in the declared class body. - :param primary_key: A list of :class:`.Column` objects which define the - primary key to be used against this mapper's selectable unit. + :param primary_key: A list of :class:`.Column` objects which define + the primary key to be used against this mapper's selectable unit. This is normally simply the primary key of the ``local_table``, but can be overridden here. @@ -476,13 +485,13 @@ def generate_version(version): return next_version Alternatively, server-side versioning functions such as triggers, - or programmatic versioning schemes outside of the version id generator - may be used, by specifying the value ``False``. + or programmatic versioning schemes outside of the version id + generator may be used, by specifying the value ``False``. Please see :ref:`server_side_version_counter` for a discussion of important points when using this option. - .. versionadded:: 0.9.0 ``version_id_generator`` supports server-side - version number generation. + .. versionadded:: 0.9.0 ``version_id_generator`` supports + server-side version number generation. .. seealso:: @@ -503,7 +512,8 @@ def generate_version(version): .. seealso:: - :ref:`with_polymorphic` - discussion of polymorphic querying techniques. + :ref:`with_polymorphic` - discussion of polymorphic querying + techniques. """ @@ -545,7 +555,7 @@ def generate_version(version): self.eager_defaults = eager_defaults self.column_prefix = column_prefix self.polymorphic_on = expression._clause_element_as_expr( - polymorphic_on) + polymorphic_on) self._dependency_processors = [] self.validators = util.immutabledict() self.passive_updates = passive_updates @@ -572,13 +582,13 @@ def generate_version(version): "an alias() of the construct instead." "This because several databases don't allow a " "SELECT from a subquery that does not have an alias." - ) + ) if self.with_polymorphic and \ - isinstance(self.with_polymorphic[1], - expression.SelectBase): + isinstance(self.with_polymorphic[1], + expression.SelectBase): self.with_polymorphic = (self.with_polymorphic[0], - self.with_polymorphic[1].alias()) + self.with_polymorphic[1].alias()) # our 'polymorphic identity', a string name that when located in a # result set row indicates this Mapper should be used to construct @@ -890,20 +900,20 @@ def _configure_inheritance(self): self.inherits = class_mapper(self.inherits, configure=False) if not issubclass(self.class_, self.inherits.class_): raise sa_exc.ArgumentError( - "Class '%s' does not inherit from '%s'" % - (self.class_.__name__, self.inherits.class_.__name__)) + "Class '%s' does not inherit from '%s'" % + (self.class_.__name__, self.inherits.class_.__name__)) if self.non_primary != self.inherits.non_primary: np = not self.non_primary and "primary" or "non-primary" raise sa_exc.ArgumentError( - "Inheritance of %s mapper for class '%s' is " - "only allowed from a %s mapper" % - (np, self.class_.__name__, np)) + "Inheritance of %s mapper for class '%s' is " + "only allowed from a %s mapper" % + (np, self.class_.__name__, np)) # inherit_condition is optional. 
if self.local_table is None: self.local_table = self.inherits.local_table self.mapped_table = self.inherits.mapped_table self.single = True - elif not self.local_table is self.inherits.local_table: + elif self.local_table is not self.inherits.local_table: if self.concrete: self.mapped_table = self.local_table for mapper in self.iterate_to_root(): @@ -913,20 +923,21 @@ def _configure_inheritance(self): if self.inherit_condition is None: # figure out inherit condition from our table to the # immediate table of the inherited mapper, not its - # full table which could pull in other stuff we dont + # full table which could pull in other stuff we don't # want (allows test/inheritance.InheritTest4 to pass) self.inherit_condition = sql_util.join_condition( - self.inherits.local_table, - self.local_table) + self.inherits.local_table, + self.local_table) self.mapped_table = sql.join( - self.inherits.mapped_table, - self.local_table, - self.inherit_condition) + self.inherits.mapped_table, + self.local_table, + self.inherit_condition) fks = util.to_set(self.inherit_foreign_keys) - self._inherits_equated_pairs = sql_util.criterion_as_pairs( - self.mapped_table.onclause, - consider_as_foreign_keys=fks) + self._inherits_equated_pairs = \ + sql_util.criterion_as_pairs( + self.mapped_table.onclause, + consider_as_foreign_keys=fks) else: self.mapped_table = self.local_table @@ -939,7 +950,7 @@ def _configure_inheritance(self): self.version_id_col = self.inherits.version_id_col self.version_id_generator = self.inherits.version_id_generator elif self.inherits.version_id_col is not None and \ - self.version_id_col is not self.inherits.version_id_col: + self.version_id_col is not self.inherits.version_id_col: util.warn( "Inheriting version_id_col '%s' does not match inherited " "version_id_col '%s' and will not automatically populate " @@ -947,12 +958,12 @@ def _configure_inheritance(self): "version_id_col should only be specified on " "the base-most mapper that includes versioning." % (self.version_id_col.description, - self.inherits.version_id_col.description) + self.inherits.version_id_col.description) ) if self.order_by is False and \ - not self.concrete and \ - self.inherits.order_by is not False: + not self.concrete and \ + self.inherits.order_by is not False: self.order_by = self.inherits.order_by self.polymorphic_map = self.inherits.polymorphic_map @@ -963,6 +974,15 @@ def _configure_inheritance(self): self._all_tables = self.inherits._all_tables if self.polymorphic_identity is not None: + if self.polymorphic_identity in self.polymorphic_map: + util.warn( + "Reassigning polymorphic association for identity %r " + "from %r to %r: Check for duplicate use of %r as " + "value for polymorphic_identity." % + (self.polymorphic_identity, + self.polymorphic_map[self.polymorphic_identity], + self, self.polymorphic_identity) + ) self.polymorphic_map[self.polymorphic_identity] = self else: @@ -975,14 +995,15 @@ def _configure_inheritance(self): if self.mapped_table is None: raise sa_exc.ArgumentError( - "Mapper '%s' does not have a mapped_table specified." - % self) + "Mapper '%s' does not have a mapped_table specified." 
+ % self) def _set_with_polymorphic(self, with_polymorphic): if with_polymorphic == '*': self.with_polymorphic = ('*', None) elif isinstance(with_polymorphic, (tuple, list)): - if isinstance(with_polymorphic[0], util.string_types + (tuple, list)): + if isinstance( + with_polymorphic[0], util.string_types + (tuple, list)): self.with_polymorphic = with_polymorphic else: self.with_polymorphic = (with_polymorphic, None) @@ -997,13 +1018,13 @@ def _set_with_polymorphic(self, with_polymorphic): "an alias() of the construct instead." "This because several databases don't allow a " "SELECT from a subquery that does not have an alias." - ) + ) if self.with_polymorphic and \ - isinstance(self.with_polymorphic[1], - expression.SelectBase): + isinstance(self.with_polymorphic[1], + expression.SelectBase): self.with_polymorphic = (self.with_polymorphic[0], - self.with_polymorphic[1].alias()) + self.with_polymorphic[1].alias()) if self.configured: self._expire_memoizations() @@ -1030,7 +1051,7 @@ def _set_concrete_base(self, mapper): for key, prop in mapper._props.items(): if key not in self._props and \ not self._should_exclude(key, key, local=False, - column=None): + column=None): self._adapt_inherited_property(key, prop, False) def _set_polymorphic_on(self, polymorphic_on): @@ -1042,7 +1063,7 @@ def _configure_legacy_instrument_class(self): if self.inherits: self.dispatch._update(self.inherits.dispatch) super_extensions = set( - chain(*[m._deprecated_extensions + chain(*[m._deprecated_extensions for m in self.inherits.iterate_to_root()])) else: super_extensions = set() @@ -1054,7 +1075,7 @@ def _configure_legacy_instrument_class(self): def _configure_listeners(self): if self.inherits: super_extensions = set( - chain(*[m._deprecated_extensions + chain(*[m._deprecated_extensions for m in self.inherits.iterate_to_root()])) else: super_extensions = set() @@ -1074,6 +1095,7 @@ def _configure_class_instrumentation(self): auto-session attachment logic. """ + manager = attributes.manager_of_class(self.class_) if self.non_primary: @@ -1096,13 +1118,15 @@ def _configure_class_instrumentation(self): "create a non primary Mapper. clear_mappers() will " "remove *all* current mappers from all classes." % self.class_) - #else: + # else: # a ClassManager may already exist as # ClassManager.instrument_attribute() creates # new managers for each subclass if they don't yet exist. _mapper_registry[self] = True + # note: this *must be called before instrumentation.register_class* + # to maintain the documented behavior of instrument_class self.dispatch.instrument_class(self, self.class_) if manager is None: @@ -1112,7 +1136,7 @@ def _configure_class_instrumentation(self): manager.mapper = self manager.deferred_scalar_loader = util.partial( - loading.load_scalar_attributes, self) + loading.load_scalar_attributes, self) # The remaining members can be added by any mapper, # e_name None or not. @@ -1121,7 +1145,6 @@ def _configure_class_instrumentation(self): event.listen(manager, 'first_init', _event_on_first_init, raw=True) event.listen(manager, 'init', _event_on_init, raw=True) - event.listen(manager, 'resurrect', _event_on_resurrect, raw=True) for key, method in util.iterate_attributes(self.class_): if isinstance(method, types.FunctionType): @@ -1137,7 +1160,6 @@ def _configure_class_instrumentation(self): manager.info[_INSTRUMENTOR] = self - @classmethod def _configure_all(cls): """Class-level path to the :func:`.configure_mappers` call. 
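The instrumentation pass above registers the ``first_init`` and ``init``
instance events on the class manager and iterates the attributes of the
mapped class looking for methods marked by the ``reconstructor()`` and
``validates()`` decorators (via their ``__sa_reconstructor__`` and
``__sa_validators__`` markers).  A minimal sketch of how those markers
typically end up on a mapped class, assuming a declarative base and purely
illustrative names (``Account``, ``init_on_load``, ``validate_email``)::

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import reconstructor, validates

    Base = declarative_base()

    class Account(Base):
        __tablename__ = 'account'

        id = Column(Integer, primary_key=True)
        email = Column(String(255))

        @reconstructor
        def init_on_load(self):
            # called in place of __init__ when the instance is
            # reconstituted from a database row
            self._cache = {}

        @validates('email')
        def validate_email(self, key, value):
            # simple per-attribute validation hook; the mapper locates
            # this method through its __sa_validators__ marker
            if '@' not in value:
                raise ValueError("invalid email address")
            return value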
@@ -1165,8 +1187,8 @@ def _configure_pks(self): self._cols_by_table = {} all_cols = util.column_set(chain(*[ - col.proxy_set for col in - self._columntoproperty])) + col.proxy_set for col in + self._columntoproperty])) pk_cols = util.column_set(c for c in all_cols if c.primary_key) @@ -1178,19 +1200,11 @@ def _configure_pks(self): # ordering is important since it determines the ordering of # mapper.primary_key (and therefore query.get()) self._pks_by_table[t] = \ - util.ordered_column_set(t.primary_key).\ - intersection(pk_cols) + util.ordered_column_set(t.primary_key).\ + intersection(pk_cols) self._cols_by_table[t] = \ - util.ordered_column_set(t.c).\ - intersection(all_cols) - - # determine cols that aren't expressed within our tables; mark these - # as "read only" properties which are refreshed upon INSERT/UPDATE - self._readonly_props = set( - self._columntoproperty[col] - for col in self._columntoproperty - if not hasattr(col, 'table') or - col.table not in self._cols_by_table) + util.ordered_column_set(t.c).\ + intersection(all_cols) # if explicit PK argument sent, add those columns to the # primary key mappings @@ -1202,17 +1216,17 @@ def _configure_pks(self): # otherwise, see that we got a full PK for the mapped table elif self.mapped_table not in self._pks_by_table or \ - len(self._pks_by_table[self.mapped_table]) == 0: - raise sa_exc.ArgumentError( - "Mapper %s could not assemble any primary " - "key columns for mapped table '%s'" % - (self, self.mapped_table.description)) + len(self._pks_by_table[self.mapped_table]) == 0: + raise sa_exc.ArgumentError( + "Mapper %s could not assemble any primary " + "key columns for mapped table '%s'" % + (self, self.mapped_table.description)) elif self.local_table not in self._pks_by_table and \ - isinstance(self.local_table, schema.Table): + isinstance(self.local_table, schema.Table): util.warn("Could not assemble any primary " - "keys for locally mapped table '%s' - " - "no rows will be persisted in this Table." - % self.local_table.description) + "keys for locally mapped table '%s' - " + "no rows will be persisted in this Table." 
+ % self.local_table.description) if self.inherits and \ not self.concrete and \ @@ -1226,12 +1240,12 @@ def _configure_pks(self): if self._primary_key_argument: primary_key = sql_util.reduce_columns( [self.mapped_table.corresponding_column(c) for c in - self._primary_key_argument], + self._primary_key_argument], ignore_nonexistent_tables=True) else: primary_key = sql_util.reduce_columns( - self._pks_by_table[self.mapped_table], - ignore_nonexistent_tables=True) + self._pks_by_table[self.mapped_table], + ignore_nonexistent_tables=True) if len(primary_key) == 0: raise sa_exc.ArgumentError( @@ -1242,6 +1256,15 @@ def _configure_pks(self): self.primary_key = tuple(primary_key) self._log("Identified primary key columns: %s", primary_key) + # determine cols that aren't expressed within our tables; mark these + # as "read only" properties which are refreshed upon INSERT/UPDATE + self._readonly_props = set( + self._columntoproperty[col] + for col in self._columntoproperty + if self._columntoproperty[col] not in self._identity_key_props and + (not hasattr(col, 'table') or + col.table not in self._cols_by_table)) + def _configure_properties(self): # Column and other ClauseElement objects which are mapped @@ -1265,7 +1288,7 @@ def _configure_properties(self): for key, prop in self.inherits._props.items(): if key not in self._props and \ not self._should_exclude(key, key, local=False, - column=None): + column=None): self._adapt_inherited_property(key, prop, False) # create properties for each column in the mapped table, @@ -1277,10 +1300,10 @@ def _configure_properties(self): column_key = (self.column_prefix or '') + column.key if self._should_exclude( - column.key, column_key, - local=self.local_table.c.contains_column(column), - column=column - ): + column.key, column_key, + local=self.local_table.c.contains_column(column), + column=column + ): continue # adjust the "key" used for this column to that @@ -1290,9 +1313,9 @@ def _configure_properties(self): column_key = mapper._columntoproperty[column].key self._configure_property(column_key, - column, - init=False, - setparent=True) + column, + init=False, + setparent=True) def _configure_polymorphic_setter(self, init=False): """Configure an attribute on the mapper representing the @@ -1311,15 +1334,15 @@ def _configure_polymorphic_setter(self, init=False): setter = True if isinstance(self.polymorphic_on, util.string_types): - # polymorphic_on specified as as string - link + # polymorphic_on specified as a string - link # it to mapped ColumnProperty try: self.polymorphic_on = self._props[self.polymorphic_on] except KeyError: raise sa_exc.ArgumentError( - "Can't determine polymorphic_on " - "value '%s' - no attribute is " - "mapped to this name." % self.polymorphic_on) + "Can't determine polymorphic_on " + "value '%s' - no attribute is " + "mapped to this name." 
% self.polymorphic_on) if self.polymorphic_on in self._columntoproperty: # polymorphic_on is a column that is already mapped @@ -1332,11 +1355,11 @@ def _configure_polymorphic_setter(self, init=False): # polymorphic_on is directly a MapperProperty, # ensure it's a ColumnProperty if not isinstance(self.polymorphic_on, - properties.ColumnProperty): + properties.ColumnProperty): raise sa_exc.ArgumentError( - "Only direct column-mapped " - "property or SQL expression " - "can be passed for polymorphic_on") + "Only direct column-mapped " + "property or SQL expression " + "can be passed for polymorphic_on") prop = self.polymorphic_on self.polymorphic_on = prop.columns[0] polymorphic_key = prop.key @@ -1355,7 +1378,7 @@ def _configure_polymorphic_setter(self, init=False): # 2. a totally standalone SQL expression which we'd # hope is compatible with this mapper's mapped_table col = self.mapped_table.corresponding_column( - self.polymorphic_on) + self.polymorphic_on) if col is None: # polymorphic_on doesn't derive from any # column/expression isn't present in the mapped @@ -1371,15 +1394,14 @@ def _configure_polymorphic_setter(self, init=False): instrument = False col = self.polymorphic_on if isinstance(col, schema.Column) and ( - self.with_polymorphic is None or \ - self.with_polymorphic[1].\ - corresponding_column(col) is None - ): + self.with_polymorphic is None or + self.with_polymorphic[1]. + corresponding_column(col) is None): raise sa_exc.InvalidRequestError( "Could not map polymorphic_on column " "'%s' to the mapped table - polymorphic " "loads will not function properly" - % col.description) + % col.description) else: # column/expression that polymorphic_on derives from # is present in our mapped table @@ -1394,19 +1416,19 @@ def _configure_polymorphic_setter(self, init=False): if key: if self._should_exclude(col.key, col.key, False, col): raise sa_exc.InvalidRequestError( - "Cannot exclude or override the " - "discriminator column %r" % - col.key) + "Cannot exclude or override the " + "discriminator column %r" % + col.key) else: self.polymorphic_on = col = \ - col.label("_sa_polymorphic_on") + col.label("_sa_polymorphic_on") key = col.key self._configure_property( - key, - properties.ColumnProperty(col, - _instrument=instrument), - init=init, setparent=True) + key, + properties.ColumnProperty(col, + _instrument=instrument), + init=init, setparent=True) polymorphic_key = key else: # no polymorphic_on was set. @@ -1422,8 +1444,8 @@ def _configure_polymorphic_setter(self, init=False): self.polymorphic_on = mapper.polymorphic_on else: self.polymorphic_on = \ - self.mapped_table.corresponding_column( - mapper.polymorphic_on) + self.mapped_table.corresponding_column( + mapper.polymorphic_on) # we can use the parent mapper's _set_polymorphic_identity # directly; it ensures the polymorphic_identity of the # instance's mapper is used so is portable to subclasses. 
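``_configure_polymorphic_setter`` above accepts ``polymorphic_on`` given as
a mapped attribute name, as a plain :class:`.Column`, or as a standalone
SQL expression which the mapper labels into its selectable.  A rough sketch
of the expression form, assuming a declarative base and hypothetical
``Employee`` / ``Manager`` classes::

    from sqlalchemy import Column, Integer, String, case
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Employee(Base):
        __tablename__ = 'employee'

        id = Column(Integer, primary_key=True)
        type = Column(String(20))

        __mapper_args__ = {
            # the discriminator can be a SQL expression rather than a
            # plain column; the mapper labels it as "_sa_polymorphic_on"
            # within its selectable
            'polymorphic_on': case(
                [(type == 'manager', 'manager')],
                else_='employee'),
            'polymorphic_identity': 'employee',
        }

    class Manager(Employee):
        __mapper_args__ = {'polymorphic_identity': 'manager'}

Because the discriminator here is an arbitrary expression rather than a
plain mapped column, the underlying ``type`` value is not written
automatically on flush; it generally has to be assigned by the application
or by an event hook.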
@@ -1439,28 +1461,28 @@ def _configure_polymorphic_setter(self, init=False): if setter: def _set_polymorphic_identity(state): dict_ = state.dict - state.get_impl(polymorphic_key).set(state, dict_, - state.manager.mapper.polymorphic_identity, None) + state.get_impl(polymorphic_key).set( + state, dict_, + state.manager.mapper.polymorphic_identity, + None) def _validate_polymorphic_identity(mapper, state, dict_): if polymorphic_key in dict_ and \ - dict_[polymorphic_key] not in \ - mapper._acceptable_polymorphic_identities: - util.warn( - "Flushing object %s with " - "incompatible polymorphic identity %r; the " - "object may not refresh and/or load correctly" % ( - state_str(state), - dict_[polymorphic_key] - ) - ) + dict_[polymorphic_key] not in \ + mapper._acceptable_polymorphic_identities: + util.warn_limited( + "Flushing object %s with " + "incompatible polymorphic identity %r; the " + "object may not refresh and/or load correctly", + (state_str(state), dict_[polymorphic_key]) + ) self._set_polymorphic_identity = _set_polymorphic_identity - self._validate_polymorphic_identity = _validate_polymorphic_identity + self._validate_polymorphic_identity = \ + _validate_polymorphic_identity else: self._set_polymorphic_identity = None - _validate_polymorphic_identity = None @_memoized_configured_property @@ -1483,14 +1505,18 @@ def _acceptable_polymorphic_identities(self): return identities + @_memoized_configured_property + def _prop_set(self): + return frozenset(self._props.values()) + def _adapt_inherited_property(self, key, prop, init): if not self.concrete: self._configure_property(key, prop, init=False, setparent=False) elif key not in self._props: self._configure_property( - key, - properties.ConcreteInheritedProperty(), - init=init, setparent=True) + key, + properties.ConcreteInheritedProperty(), + init=init, setparent=True) def _configure_property(self, key, prop, init=True, setparent=True): self._log("_configure_property(%s, %s)", key, prop.__class__.__name__) @@ -1512,7 +1538,7 @@ def _configure_property(self, key, prop, init=True, setparent=True): for m2 in path: m2.mapped_table._reset_exported() col = self.mapped_table.corresponding_column( - prop.columns[0]) + prop.columns[0]) break path.append(m) @@ -1526,14 +1552,14 @@ def _configure_property(self, key, prop, init=True, setparent=True): if hasattr(self, '_readonly_props') and \ (not hasattr(col, 'table') or col.table not in self._cols_by_table): - self._readonly_props.add(prop) + self._readonly_props.add(prop) else: # if column is coming in after _cols_by_table was # initialized, ensure the col is in the right set if hasattr(self, '_cols_by_table') and \ - col.table in self._cols_by_table and \ - col not in self._cols_by_table[col.table]: + col.table in self._cols_by_table and \ + col not in self._cols_by_table[col.table]: self._cols_by_table[col.table].add(col) # if this properties.ColumnProperty represents the "polymorphic @@ -1541,8 +1567,8 @@ def _configure_property(self, key, prop, init=True, setparent=True): # columns in SELECT statements. 
if not hasattr(prop, '_is_polymorphic_discriminator'): prop._is_polymorphic_discriminator = \ - (col is self.polymorphic_on or - prop.columns[0] is self.polymorphic_on) + (col is self.polymorphic_on or + prop.columns[0] is self.polymorphic_on) self.columns[key] = col for col in prop.columns + prop._orig_columns: @@ -1558,20 +1584,22 @@ def _configure_property(self, key, prop, init=True, setparent=True): getattr(self._props[key], '_mapped_by_synonym', False): syn = self._props[key]._mapped_by_synonym raise sa_exc.ArgumentError( - "Can't call map_column=True for synonym %r=%r, " - "a ColumnProperty already exists keyed to the name " - "%r for column %r" % (syn, key, key, syn) - ) + "Can't call map_column=True for synonym %r=%r, " + "a ColumnProperty already exists keyed to the name " + "%r for column %r" % (syn, key, key, syn) + ) if key in self._props and \ not isinstance(prop, properties.ColumnProperty) and \ not isinstance(self._props[key], properties.ColumnProperty): util.warn("Property %s on %s being replaced with new " - "property %s; the old property will be discarded" % ( - self._props[key], - self, - prop, - )) + "property %s; the old property will be discarded" % ( + self._props[key], + self, + prop, + )) + oldprop = self._props[key] + self._path_registry.pop(oldprop, None) self._props[key] = prop @@ -1598,29 +1626,38 @@ def _property_from_column(self, key, prop): column = columns[0] if not expression._is_column(column): raise sa_exc.ArgumentError( - "%s=%r is not an instance of MapperProperty or Column" - % (key, prop)) + "%s=%r is not an instance of MapperProperty or Column" + % (key, prop)) prop = self._props.get(key, None) if isinstance(prop, properties.ColumnProperty): - if prop.parent is self: - raise sa_exc.InvalidRequestError( - "Implicitly combining column %s with column " - "%s under attribute '%s'. Please configure one " - "or more attributes for these same-named columns " - "explicitly." - % (prop.columns[-1], column, key)) + if ( + not self._inherits_equated_pairs or + (prop.columns[0], column) not in self._inherits_equated_pairs + ) and \ + not prop.columns[0].shares_lineage(column) and \ + prop.columns[0] is not self.version_id_col and \ + column is not self.version_id_col: + warn_only = prop.parent is not self + msg = ("Implicitly combining column %s with column " + "%s under attribute '%s'. Please configure one " + "or more attributes for these same-named columns " + "explicitly." % (prop.columns[-1], column, key)) + if warn_only: + util.warn(msg) + else: + raise sa_exc.InvalidRequestError(msg) # existing properties.ColumnProperty from an inheriting # mapper. make a copy and append our column to it prop = prop.copy() prop.columns.insert(0, column) self._log("inserting column to existing list " - "in properties.ColumnProperty %s" % (key)) + "in properties.ColumnProperty %s" % (key)) return prop elif prop is None or isinstance(prop, - properties.ConcreteInheritedProperty): + properties.ConcreteInheritedProperty): mapped_column = [] for c in columns: mc = self.mapped_table.corresponding_column(c) @@ -1635,11 +1672,11 @@ def _property_from_column(self, key, prop): mc = self.mapped_table.corresponding_column(c) if mc is None: raise sa_exc.ArgumentError( - "When configuring property '%s' on %s, " - "column '%s' is not represented in the mapper's " - "table. Use the `column_property()` function to " - "force this column to be mapped as a read-only " - "attribute." 
% (key, self, c)) + "When configuring property '%s' on %s, " + "column '%s' is not represented in the mapper's " + "table. Use the `column_property()` function to " + "force this column to be mapped as a read-only " + "attribute." % (key, self, c)) mapped_column.append(mc) return properties.ColumnProperty(*mapped_column) else: @@ -1710,7 +1747,7 @@ def _log_desc(self): self.local_table.description or str(self.local_table)) +\ (self.non_primary and - "|non-primary" or "") + ")" + "|non-primary" or "") + ")" def _log(self, msg, *args): self.logger.info( @@ -1741,7 +1778,7 @@ def _is_orphan(self, state): orphan_possible = True has_parent = attributes.manager_of_class(cls).has_parent( - state, key, optimistic=state.has_identity) + state, key, optimistic=state.has_identity) if self.legacy_is_orphan and has_parent: return False @@ -1767,7 +1804,7 @@ def get_property(self, key, _configure_mappers=True): return self._props[key] except KeyError: raise sa_exc.InvalidRequestError( - "Mapper '%s' has no property '%s'" % (self, key)) + "Mapper '%s' has no property '%s'" % (self, key)) def get_property_by_column(self, column): """Given a :class:`.Column` object, return the @@ -1798,8 +1835,8 @@ def _mappers_from_spec(self, spec, selectable): m = _class_to_mapper(m) if not m.isa(self): raise sa_exc.InvalidRequestError( - "%r does not inherit from %r" % - (m, self)) + "%r does not inherit from %r" % + (m, self)) if selectable is None: mappers.update(m.iterate_to_root()) @@ -1811,7 +1848,7 @@ def _mappers_from_spec(self, spec, selectable): if selectable is not None: tables = set(sql_util.find_tables(selectable, - include_aliases=True)) + include_aliases=True)) mappers = [m for m in mappers if m.local_table in tables] return mappers @@ -1827,23 +1864,23 @@ def _selectable_from_mappers(self, mappers, innerjoin): continue if m.concrete: raise sa_exc.InvalidRequestError( - "'with_polymorphic()' requires 'selectable' argument " - "when concrete-inheriting mappers are used.") + "'with_polymorphic()' requires 'selectable' argument " + "when concrete-inheriting mappers are used.") elif not m.single: if innerjoin: from_obj = from_obj.join(m.local_table, - m.inherit_condition) + m.inherit_condition) else: from_obj = from_obj.outerjoin(m.local_table, - m.inherit_condition) + m.inherit_condition) return from_obj @_memoized_configured_property def _single_table_criterion(self): if self.single and \ - self.inherits and \ - self.polymorphic_on is not None: + self.inherits and \ + self.polymorphic_on is not None: return self.polymorphic_on.in_( m.polymorphic_identity for m in self.self_and_descendants) @@ -1868,8 +1905,8 @@ def _with_polymorphic_selectable(self): return selectable else: return self._selectable_from_mappers( - self._mappers_from_spec(spec, selectable), - False) + self._mappers_from_spec(spec, selectable), + False) with_polymorphic_mappers = _with_polymorphic_mappers """The list of :class:`.Mapper` objects included in the @@ -1877,6 +1914,66 @@ def _with_polymorphic_selectable(self): """ + @_memoized_configured_property + def _insert_cols_as_none(self): + return dict( + ( + table, + frozenset( + col.key for col in columns + if not col.primary_key and + not col.server_default and not col.default) + ) + for table, columns in self._cols_by_table.items() + ) + + @_memoized_configured_property + def _propkey_to_col(self): + return dict( + ( + table, + dict( + (self._columntoproperty[col].key, col) + for col in columns + ) + ) + for table, columns in self._cols_by_table.items() + ) + + 
@_memoized_configured_property + def _pk_keys_by_table(self): + return dict( + ( + table, + frozenset([col.key for col in pks]) + ) + for table, pks in self._pks_by_table.items() + ) + + @_memoized_configured_property + def _server_default_cols(self): + return dict( + ( + table, + frozenset([ + col.key for col in columns + if col.server_default is not None]) + ) + for table, columns in self._cols_by_table.items() + ) + + @_memoized_configured_property + def _server_onupdate_default_cols(self): + return dict( + ( + table, + frozenset([ + col.key for col in columns + if col.server_onupdate is not None]) + ) + for table, columns in self._cols_by_table.items() + ) + @property def selectable(self): """The :func:`.select` construct this :class:`.Mapper` selects from @@ -1890,7 +1987,7 @@ def selectable(self): return self._with_polymorphic_selectable def _with_polymorphic_args(self, spec=None, selectable=False, - innerjoin=False): + innerjoin=False): if self.with_polymorphic: if not spec: spec = self.with_polymorphic[0] @@ -1903,14 +2000,13 @@ def _with_polymorphic_args(self, spec=None, selectable=False, return mappers, selectable else: return mappers, self._selectable_from_mappers(mappers, - innerjoin) + innerjoin) @_memoized_configured_property def _polymorphic_properties(self): return list(self._iterate_polymorphic_properties( self._with_polymorphic_mappers)) - def _iterate_polymorphic_properties(self, mappers=None): """Return an iterator of MapperProperty objects which will render into a SELECT.""" @@ -1926,14 +2022,14 @@ def _iterate_polymorphic_properties(self, mappers=None): # mapper's polymorphic selectable (which we don't want rendered) for c in util.unique_list( chain(*[ - list(mapper.iterate_properties) for mapper in - [self] + mappers - ]) + list(mapper.iterate_properties) for mapper in + [self] + mappers + ]) ): if getattr(c, '_is_polymorphic_discriminator', False) and \ (self.polymorphic_on is None or - c.columns[0] is not self.polymorphic_on): - continue + c.columns[0] is not self.polymorphic_on): + continue yield c @util.memoized_property @@ -1954,6 +2050,17 @@ def attrs(self): returned, inclding :attr:`.synonyms`, :attr:`.column_attrs`, :attr:`.relationships`, and :attr:`.composites`. + .. warning:: + + The :attr:`.Mapper.attrs` accessor namespace is an + instance of :class:`.OrderedProperties`. This is + a dictionary-like object which includes a small number of + named methods such as :meth:`.OrderedProperties.items` + and :meth:`.OrderedProperties.values`. When + accessing attributes dynamically, favor using the dict-access + scheme, e.g. ``mapper.attrs[somename]`` over + ``getattr(mapper.attrs, somename)`` to avoid name collisions. + .. seealso:: :attr:`.Mapper.all_orm_descriptors` @@ -1965,27 +2072,41 @@ def attrs(self): @util.memoized_property def all_orm_descriptors(self): - """A namespace of all :class:`._InspectionAttr` attributes associated + """A namespace of all :class:`.InspectionAttr` attributes associated with the mapped class. - These attributes are in all cases Python :term:`descriptors` associated - with the mapped class or its superclasses. + These attributes are in all cases Python :term:`descriptors` + associated with the mapped class or its superclasses. This namespace includes attributes that are mapped to the class as well as attributes declared by extension modules. It includes any Python descriptor type that inherits from - :class:`._InspectionAttr`. 
This includes :class:`.QueryableAttribute`, - as well as extension types such as :class:`.hybrid_property`, - :class:`.hybrid_method` and :class:`.AssociationProxy`. + :class:`.InspectionAttr`. This includes + :class:`.QueryableAttribute`, as well as extension types such as + :class:`.hybrid_property`, :class:`.hybrid_method` and + :class:`.AssociationProxy`. To distinguish between mapped attributes and extension attributes, - the attribute :attr:`._InspectionAttr.extension_type` will refer + the attribute :attr:`.InspectionAttr.extension_type` will refer to a constant that distinguishes between different extension types. When dealing with a :class:`.QueryableAttribute`, the :attr:`.QueryableAttribute.property` attribute refers to the - :class:`.MapperProperty` property, which is what you get when referring - to the collection of mapped properties via :attr:`.Mapper.attrs`. + :class:`.MapperProperty` property, which is what you get when + referring to the collection of mapped properties via + :attr:`.Mapper.attrs`. + + .. warning:: + + The :attr:`.Mapper.all_orm_descriptors` accessor namespace is an + instance of :class:`.OrderedProperties`. This is + a dictionary-like object which includes a small number of + named methods such as :meth:`.OrderedProperties.items` + and :meth:`.OrderedProperties.values`. When + accessing attributes dynamically, favor using the dict-access + scheme, e.g. ``mapper.all_orm_descriptors[somename]`` over + ``getattr(mapper.all_orm_descriptors, somename)`` to avoid name + collisions. .. versionadded:: 0.8.0 @@ -1995,7 +2116,7 @@ def all_orm_descriptors(self): """ return util.ImmutableProperties( - dict(self.class_manager._all_sqla_attributes())) + dict(self.class_manager._all_sqla_attributes())) @_memoized_configured_property def synonyms(self): @@ -2025,8 +2146,20 @@ def column_attrs(self): @_memoized_configured_property def relationships(self): - """Return a namespace of all :class:`.RelationshipProperty` - properties maintained by this :class:`.Mapper`. + """A namespace of all :class:`.RelationshipProperty` properties + maintained by this :class:`.Mapper`. + + .. warning:: + + the :attr:`.Mapper.relationships` accessor namespace is an + instance of :class:`.OrderedProperties`. This is + a dictionary-like object which includes a small number of + named methods such as :meth:`.OrderedProperties.items` + and :meth:`.OrderedProperties.values`. When + accessing attributes dynamically, favor using the dict-access + scheme, e.g. ``mapper.relationships[somename]`` over + ``getattr(mapper.relationships, somename)`` to avoid name + collisions. .. 
seealso:: @@ -2067,7 +2200,7 @@ def _get_clause(self): params = [(primary_key, sql.bindparam(None, type_=primary_key.type)) for primary_key in self.primary_key] return sql.and_(*[k == v for (k, v) in params]), \ - util.column_dict(params) + util.column_dict(params) @_memoized_configured_property def _equivalent_columns(self): @@ -2105,8 +2238,8 @@ def visit_binary(binary): for mapper in self.base_mapper.self_and_descendants: if mapper.inherit_condition is not None: visitors.traverse( - mapper.inherit_condition, {}, - {'binary': visit_binary}) + mapper.inherit_condition, {}, + {'binary': visit_binary}) return result @@ -2131,13 +2264,13 @@ def _should_exclude(self, name, assigned_name, local, column): # either local or from an inherited class if local: if self.class_.__dict__.get(assigned_name, None) is not None \ - and self._is_userland_descriptor( - self.class_.__dict__[assigned_name]): + and self._is_userland_descriptor( + self.class_.__dict__[assigned_name]): return True else: if getattr(self.class_, assigned_name, None) is not None \ - and self._is_userland_descriptor( - getattr(self.class_, assigned_name)): + and self._is_userland_descriptor( + getattr(self.class_, assigned_name)): return True if self.include_properties is not None and \ @@ -2147,10 +2280,10 @@ def _should_exclude(self, name, assigned_name, local, column): return True if self.exclude_properties is not None and \ - ( - name in self.exclude_properties or \ - (column is not None and column in self.exclude_properties) - ): + ( + name in self.exclude_properties or + (column is not None and column in self.exclude_properties) + ): self._log("excluding property %s" % (name)) return True @@ -2222,15 +2355,25 @@ def primary_mapper(self): def primary_base_mapper(self): return self.class_manager.mapper.base_mapper + def _result_has_identity_key(self, result, adapter=None): + pk_cols = self.primary_key + if adapter: + pk_cols = [adapter.columns[c] for c in pk_cols] + for col in pk_cols: + if not result._has_key(col): + return False + else: + return True + def identity_key_from_row(self, row, adapter=None): """Return an identity-map key for use in storing/retrieving an item from the identity map. - :param row: A :class:`.RowProxy` instance. The columns which are mapped - by this :class:`.Mapper` should be locatable in the row, preferably - via the :class:`.Column` object directly (as is the case when a - :func:`.select` construct is executed), or via string names of the form - ``_``. + :param row: A :class:`.RowProxy` instance. The columns which are + mapped by this :class:`.Mapper` should be locatable in the row, + preferably via the :class:`.Column` object directly (as is the case + when a :func:`.select` construct is executed), or via string names of + the form ``_``. 
""" pk_cols = self.primary_key @@ -2238,7 +2381,7 @@ def identity_key_from_row(self, row, adapter=None): pk_cols = [adapter.columns[c] for c in pk_cols] return self._identity_class, \ - tuple(row[column] for column in pk_cols) + tuple(row[column] for column in pk_cols) def identity_key_from_primary_key(self, primary_key): """Return an identity-map key for use in storing/retrieving an @@ -2263,14 +2406,14 @@ def identity_key_from_instance(self, instance): """ return self.identity_key_from_primary_key( - self.primary_key_from_instance(instance)) + self.primary_key_from_instance(instance)) def _identity_key_from_state(self, state): dict_ = state.dict manager = state.manager return self._identity_class, tuple([ - manager[self._columntoproperty[col].key].\ - impl.get(state, dict_, attributes.PASSIVE_OFF) + manager[self._columntoproperty[col].key]. + impl.get(state, dict_, attributes.PASSIVE_RETURN_NEVER_SET) for col in self.primary_key ]) @@ -2285,22 +2428,50 @@ def primary_key_from_instance(self, instance): """ state = attributes.instance_state(instance) - return self._primary_key_from_state(state) + return self._primary_key_from_state(state, attributes.PASSIVE_OFF) - def _primary_key_from_state(self, state): + def _primary_key_from_state( + self, state, passive=attributes.PASSIVE_RETURN_NEVER_SET): dict_ = state.dict manager = state.manager return [ - manager[self._columntoproperty[col].key].\ - impl.get(state, dict_, attributes.PASSIVE_OFF) - for col in self.primary_key + manager[prop.key]. + impl.get(state, dict_, passive) + for prop in self._identity_key_props ] - def _get_state_attr_by_column(self, state, dict_, column, - passive=attributes.PASSIVE_OFF): + @_memoized_configured_property + def _identity_key_props(self): + return [self._columntoproperty[col] for col in self.primary_key] + + @_memoized_configured_property + def _all_pk_props(self): + collection = set() + for table in self.tables: + collection.update(self._pks_by_table[table]) + return collection + + @_memoized_configured_property + def _should_undefer_in_wildcard(self): + cols = set(self.primary_key) + if self.polymorphic_on is not None: + cols.add(self.polymorphic_on) + return cols + + @_memoized_configured_property + def _primary_key_propkeys(self): + return set([prop.key for prop in self._all_pk_props]) + + def _get_state_attr_by_column( + self, state, dict_, column, + passive=attributes.PASSIVE_RETURN_NEVER_SET): prop = self._columntoproperty[column] return state.manager[prop.key].impl.get(state, dict_, passive=passive) + def _set_committed_state_attr_by_column(self, state, dict_, column, value): + prop = self._columntoproperty[column] + state.manager[prop.key].impl.set_committed_value(state, dict_, value) + def _set_state_attr_by_column(self, state, dict_, column, value): prop = self._columntoproperty[column] state.manager[prop.key].impl.set(state, dict_, value, None) @@ -2308,14 +2479,16 @@ def _set_state_attr_by_column(self, state, dict_, column, value): def _get_committed_attr_by_column(self, obj, column): state = attributes.instance_state(obj) dict_ = attributes.instance_dict(obj) - return self._get_committed_state_attr_by_column(state, dict_, column) + return self._get_committed_state_attr_by_column( + state, dict_, column, passive=attributes.PASSIVE_OFF) - def _get_committed_state_attr_by_column(self, state, dict_, - column, passive=attributes.PASSIVE_OFF): + def _get_committed_state_attr_by_column( + self, state, dict_, column, + passive=attributes.PASSIVE_RETURN_NEVER_SET): prop = self._columntoproperty[column] 
return state.manager[prop.key].impl.\ - get_committed_value(state, dict_, passive=passive) + get_committed_value(state, dict_, passive=passive) def _optimized_get_statement(self, state, attribute_names): """assemble a WHERE clause which retrieves a given state by primary @@ -2330,10 +2503,10 @@ def _optimized_get_statement(self, state, attribute_names): props = self._props tables = set(chain( - *[sql_util.find_tables(c, check_columns=True) - for key in attribute_names - for c in props[key].columns] - )) + *[sql_util.find_tables(c, check_columns=True) + for key in attribute_names + for c in props[key].columns] + )) if self.base_mapper.local_table in tables: return None @@ -2349,23 +2522,22 @@ def visit_binary(binary): if leftcol.table not in tables: leftval = self._get_committed_state_attr_by_column( - state, state.dict, - leftcol, - passive=attributes.PASSIVE_NO_INITIALIZE) - if leftval is attributes.PASSIVE_NO_RESULT or leftval is None: + state, state.dict, + leftcol, + passive=attributes.PASSIVE_NO_INITIALIZE) + if leftval in orm_util._none_set: raise ColumnsNotAvailable() binary.left = sql.bindparam(None, leftval, type_=binary.right.type) elif rightcol.table not in tables: rightval = self._get_committed_state_attr_by_column( - state, state.dict, - rightcol, - passive=attributes.PASSIVE_NO_INITIALIZE) - if rightval is attributes.PASSIVE_NO_RESULT or \ - rightval is None: + state, state.dict, + rightcol, + passive=attributes.PASSIVE_NO_INITIALIZE) + if rightval in orm_util._none_set: raise ColumnsNotAvailable() binary.right = sql.bindparam(None, rightval, - type_=binary.right.type) + type_=binary.right.type) allconds = [] @@ -2374,15 +2546,16 @@ def visit_binary(binary): for mapper in reversed(list(self.iterate_to_root())): if mapper.local_table in tables: start = True - elif not isinstance(mapper.local_table, expression.TableClause): + elif not isinstance(mapper.local_table, + expression.TableClause): return None if start and not mapper.single: allconds.append(visitors.cloned_traverse( - mapper.inherit_condition, - {}, - {'binary': visit_binary} - ) - ) + mapper.inherit_condition, + {}, + {'binary': visit_binary} + ) + ) except ColumnsNotAvailable: return None @@ -2398,22 +2571,31 @@ def cascade_iterator(self, type_, state, halt_on=None): for all relationships that meet the given cascade rule. :param type_: - The name of the cascade rule (i.e. save-update, delete, - etc.) + The name of the cascade rule (i.e. ``"save-update"``, ``"delete"``, + etc.). + + .. note:: the ``"all"`` cascade is not accepted here. For a generic + object traversal function, see :ref:`faq_walk_objects`. :param state: The lead InstanceState. child items will be processed per the relationships defined for this object's mapper. - the return value are object instances; this provides a strong - reference so that they don't fall out of scope immediately. + :return: the method yields individual object instances. + + .. seealso:: + + :ref:`unitofwork_cascades` + + :ref:`faq_walk_objects` - illustrates a generic function to + traverse all objects without relying on cascades. 
""" visited_states = set() prp, mpp = object(), object() visitables = deque([(deque(self._props.values()), prp, - state, state.dict)]) + state, state.dict)]) while visitables: iterator, item_type, parent_state, parent_dict = visitables[-1] @@ -2425,18 +2607,19 @@ def cascade_iterator(self, type_, state, halt_on=None): prop = iterator.popleft() if type_ not in prop.cascade: continue - queue = deque(prop.cascade_iterator(type_, parent_state, - parent_dict, visited_states, halt_on)) + queue = deque(prop.cascade_iterator( + type_, parent_state, parent_dict, + visited_states, halt_on)) if queue: visitables.append((queue, mpp, None, None)) elif item_type is mpp: instance, instance_mapper, corresponding_state, \ - corresponding_dict = iterator.popleft() + corresponding_dict = iterator.popleft() yield instance, instance_mapper, \ - corresponding_state, corresponding_dict + corresponding_state, corresponding_dict visitables.append((deque(instance_mapper._props.values()), - prp, corresponding_state, - corresponding_dict)) + prp, corresponding_state, + corresponding_dict)) @_memoized_configured_property def _compiled_cache(self): @@ -2457,13 +2640,13 @@ def _sorted_tables(self): extra_dependencies.extend([ (super_table, table) for super_table in super_.tables - ]) + ]) def skip(fk): # attempt to skip dependencies that are not # significant to the inheritance chain # for two tables that are related by inheritance. - # while that dependency may be important, it's techinically + # while that dependency may be important, it's technically # not what we mean to sort on here. parent = table_to_mapper.get(fk.parent.table) dep = table_to_mapper.get(fk.column.table) @@ -2474,15 +2657,15 @@ def skip(fk): cols = set(sql_util._find_columns(dep.inherit_condition)) if parent.inherit_condition is not None: cols = cols.union(sql_util._find_columns( - parent.inherit_condition)) + parent.inherit_condition)) return fk.parent not in cols and fk.column not in cols else: return fk.parent not in cols return False sorted_ = sql_util.sort_tables(table_to_mapper, - skip_fn=skip, - extra_dependencies=extra_dependencies) + skip_fn=skip, + extra_dependencies=extra_dependencies) ret = util.OrderedDict() for t in sorted_: @@ -2507,10 +2690,11 @@ def _table_to_equated(self): cols = set(table.c) for m in self.iterate_to_root(): if m._inherits_equated_pairs and \ - cols.intersection( - util.reduce(set.union, - [l.proxy_set for l, r in m._inherits_equated_pairs]) - ): + cols.intersection( + util.reduce(set.union, + [l.proxy_set for l, r in + m._inherits_equated_pairs]) + ): result[table].append((m, m._inherits_equated_pairs)) return result @@ -2521,7 +2705,33 @@ def configure_mappers(): have been constructed thus far. This function can be called any number of times, but in - most cases is handled internally. + most cases is invoked automatically, the first time mappings are used, + as well as whenever mappings are used and additional not-yet-configured + mappers have been constructed. + + Points at which this occur include when a mapped class is instantiated + into an instance, as well as when the :meth:`.Session.query` method + is used. + + The :func:`.configure_mappers` function provides several event hooks + that can be used to augment its functionality. These methods include: + + * :meth:`.MapperEvents.before_configured` - called once before + :func:`.configure_mappers` does any work; this can be used to establish + additional options, properties, or related mappings before the operation + proceeds. 
+ + * :meth:`.MapperEvents.mapper_configured` - called as each indivudal + :class:`.Mapper` is configured within the process; will include all + mapper state except for backrefs set up by other mappers that are still + to be configured. + + * :meth:`.MapperEvents.after_configured` - called once after + :func:`.configure_mappers` is complete; at this stage, all + :class:`.Mapper` objects that are known to SQLAlchemy will be fully + configured. Note that the calling application may still have other + mappings that haven't been produced yet, such as if they are in modules + as yet unimported. """ @@ -2540,7 +2750,7 @@ def configure_mappers(): if not Mapper._new_mappers: return - Mapper.dispatch(Mapper).before_configured() + Mapper.dispatch._for_class(Mapper).before_configured() # initialize properties on all mappers # note that _mapper_registry is unordered, which # may randomly conceal/reveal issues related to @@ -2549,10 +2759,10 @@ def configure_mappers(): for mapper in list(_mapper_registry): if getattr(mapper, '_configure_failed', False): e = sa_exc.InvalidRequestError( - "One or more mappers failed to initialize - " - "can't proceed with initialization of other " - "mappers. Original exception was: %s" - % mapper._configure_failed) + "One or more mappers failed to initialize - " + "can't proceed with initialization of other " + "mappers. Original exception was: %s" + % mapper._configure_failed) e._configure_failed = mapper._configure_failed raise e if not mapper.configured: @@ -2560,8 +2770,8 @@ def configure_mappers(): mapper._post_configure_properties() mapper._expire_memoizations() mapper.dispatch.mapper_configured( - mapper, mapper.class_) - except: + mapper, mapper.class_) + except Exception: exc = sys.exc_info()[1] if not hasattr(exc, '_configure_failed'): mapper._configure_failed = exc @@ -2572,7 +2782,7 @@ def configure_mappers(): _already_compiling = False finally: _CONFIGURE_MUTEX.release() - Mapper.dispatch(Mapper).after_configured() + Mapper.dispatch._for_class(Mapper).after_configured() def reconstructor(fn): @@ -2637,9 +2847,9 @@ def validates(*names, **kw): def wrap(fn): fn.__sa_validators__ = names fn.__sa_validation_opts__ = { - "include_removes": include_removes, - "include_backrefs": include_backrefs - } + "include_removes": include_removes, + "include_backrefs": include_backrefs + } return fn return wrap @@ -2681,19 +2891,11 @@ def _event_on_init(state, args, kwargs): instrumenting_mapper._set_polymorphic_identity(state) -def _event_on_resurrect(state): - # re-populate the primary key elements - # of the dict based on the mapping. - instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR) - if instrumenting_mapper: - for col, val in zip(instrumenting_mapper.primary_key, state.key[1]): - instrumenting_mapper._set_state_attr_by_column( - state, state.dict, col, val) - - class _ColumnMapping(dict): """Error reporting helper for mapper._columntoproperty.""" + __slots__ = 'mapper', + def __init__(self, mapper): self.mapper = mapper diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py index 3397626b84..cf18465555 100644 --- a/lib/sqlalchemy/orm/path_registry.py +++ b/lib/sqlalchemy/orm/path_registry.py @@ -1,5 +1,6 @@ # orm/path_registry.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,6 +13,10 @@ from .. 
import exc from itertools import chain from .base import class_mapper +import logging + +log = logging.getLogger(__name__) + def _unreduce_path(path): return PathRegistry.deserialize(path) @@ -20,6 +25,7 @@ def _unreduce_path(path): _WILDCARD_TOKEN = "*" _DEFAULT_TOKEN = "_sa_default" + class PathRegistry(object): """Represent query load paths and registry functions. @@ -46,14 +52,19 @@ class PathRegistry(object): """ + is_token = False + is_root = False + def __eq__(self, other): return other is not None and \ self.path == other.path def set(self, attributes, key, value): + log.debug("set '%s' on path '%s' to '%s'", key, self, value) attributes[(key, self.path)] = value def setdefault(self, attributes, key, value): + log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value) attributes.setdefault((key, self.path), value) def get(self, attributes, key, value=None): @@ -80,7 +91,7 @@ def contains_mapper(self, mapper): self.path[i] for i in range(0, len(self.path), 2) ]: if path_mapper.is_mapper and \ - path_mapper.isa(mapper): + path_mapper.isa(mapper): return True else: return False @@ -104,9 +115,9 @@ def deserialize(cls, path): return None p = tuple(chain(*[(class_mapper(mcls), - class_mapper(mcls).attrs[key] - if key is not None else None) - for mcls, key in path])) + class_mapper(mcls).attrs[key] + if key is not None else None) + for mcls, key in path])) if p and p[-1] is None: p = p[0:-1] return cls.coerce(p) @@ -114,8 +125,8 @@ def deserialize(cls, path): @classmethod def per_mapper(cls, mapper): return EntityRegistry( - cls.root, mapper - ) + cls.root, mapper + ) @classmethod def coerce(cls, raw): @@ -131,8 +142,8 @@ def token(self, token): def __add__(self, other): return util.reduce( - lambda prev, next: prev[next], - other.path, self) + lambda prev, next: prev[next], + other.path, self) def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self.path, ) @@ -145,11 +156,15 @@ class RootRegistry(PathRegistry): """ path = () has_entity = False + is_aliased_class = False + is_root = True + def __getitem__(self, entity): return entity._path_registry PathRegistry.root = RootRegistry() + class TokenRegistry(PathRegistry): def __init__(self, parent, token): self.token = token @@ -158,9 +173,19 @@ def __init__(self, parent, token): has_entity = False + is_token = True + + def generate_for_superclasses(self): + if not self.parent.is_aliased_class and not self.parent.is_root: + for ent in self.parent.mapper.iterate_to_root(): + yield TokenRegistry(self.parent.parent[ent], self.token) + else: + yield self + def __getitem__(self, entity): raise NotImplementedError() + class PropRegistry(PathRegistry): def __init__(self, parent, prop): # restate this path in terms of the @@ -170,7 +195,7 @@ def __init__(self, parent, prop): parent = parent.parent[prop.parent] elif insp.is_aliased_class and insp.with_polymorphic_mappers: if prop.parent is not insp.mapper and \ - prop.parent in insp.with_polymorphic_mappers: + prop.parent in insp.with_polymorphic_mappers: subclass_entity = parent[-1]._entity_for_mapper(prop.parent) parent = parent.parent[subclass_entity] @@ -178,6 +203,11 @@ def __init__(self, parent, prop): self.parent = parent self.path = parent.path + (prop,) + def __str__(self): + return " -> ".join( + str(elem) for elem in self.path + ) + @util.memoized_property def has_entity(self): return hasattr(self.prop, "mapper") @@ -195,16 +225,18 @@ def _wildcard_path_loader_key(self): """ return ("loader", self.parent.token( - "%s:%s" % (self.prop.strategy_wildcard_key, 
_WILDCARD_TOKEN) - ).path + "%s:%s" % ( + self.prop.strategy_wildcard_key, _WILDCARD_TOKEN) + ).path ) @util.memoized_property def _default_path_loader_key(self): return ("loader", self.parent.token( - "%s:%s" % (self.prop.strategy_wildcard_key, _DEFAULT_TOKEN) - ).path + "%s:%s" % (self.prop.strategy_wildcard_key, + _DEFAULT_TOKEN) + ).path ) @util.memoized_property @@ -227,6 +259,7 @@ def __getitem__(self, entity): self, entity ) + class EntityRegistry(PathRegistry, dict): is_aliased_class = False has_entity = True @@ -256,6 +289,3 @@ def __getitem__(self, entity): def __missing__(self, key): self[key] = item = PropRegistry(self, key) return item - - - diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index 1bd432f155..5d69f51f3f 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -1,5 +1,6 @@ # orm/persistence.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -14,15 +15,114 @@ """ import operator -from itertools import groupby -from .. import sql, util, exc as sa_exc, schema +from itertools import groupby, chain +from .. import sql, util, exc as sa_exc from . import attributes, sync, exc as orm_exc, evaluator -from .base import _state_mapper, state_str, _attr_as_key +from .base import state_str, _attr_as_key, _entity_descriptor from ..sql import expression +from ..sql.base import _from_objects from . import loading -def save_obj(base_mapper, states, uowtransaction, single=False): +def _bulk_insert( + mapper, mappings, session_transaction, isstates, return_defaults): + base_mapper = mapper.base_mapper + + cached_connections = _cached_connection_dict(base_mapper) + + if session_transaction.session.connection_callable: + raise NotImplementedError( + "connection_callable / per-instance sharding " + "not supported in bulk_insert()") + + if isstates: + if return_defaults: + states = [(state, state.dict) for state in mappings] + mappings = [dict_ for (state, dict_) in states] + else: + mappings = [state.dict for state in mappings] + else: + mappings = list(mappings) + + connection = session_transaction.connection(base_mapper) + for table, super_mapper in base_mapper._sorted_tables.items(): + if not mapper.isa(super_mapper): + continue + + records = ( + (None, state_dict, params, mapper, + connection, value_params, has_all_pks, has_all_defaults) + for + state, state_dict, params, mp, + conn, value_params, has_all_pks, + has_all_defaults in _collect_insert_commands(table, ( + (None, mapping, mapper, connection) + for mapping in mappings), + bulk=True, return_defaults=return_defaults + ) + ) + _emit_insert_statements(base_mapper, None, + cached_connections, + super_mapper, table, records, + bookkeeping=return_defaults) + + if return_defaults and isstates: + identity_cls = mapper._identity_class + identity_props = [p.key for p in mapper._identity_key_props] + for state, dict_ in states: + state.key = ( + identity_cls, + tuple([dict_[key] for key in identity_props]) + ) + + +def _bulk_update(mapper, mappings, session_transaction, + isstates, update_changed_only): + base_mapper = mapper.base_mapper + + cached_connections = _cached_connection_dict(base_mapper) + + def _changed_dict(mapper, state): + return dict( + (k, v) + for k, v in state.dict.items() if k in state.committed_state or k + in 
mapper._primary_key_propkeys + ) + + if isstates: + if update_changed_only: + mappings = [_changed_dict(mapper, state) for state in mappings] + else: + mappings = [state.dict for state in mappings] + else: + mappings = list(mappings) + + if session_transaction.session.connection_callable: + raise NotImplementedError( + "connection_callable / per-instance sharding " + "not supported in bulk_update()") + + connection = session_transaction.connection(base_mapper) + + for table, super_mapper in base_mapper._sorted_tables.items(): + if not mapper.isa(super_mapper): + continue + + records = _collect_update_commands(None, table, ( + (None, mapping, mapper, connection, + (mapping[mapper._version_id_prop.key] + if mapper._version_id_prop else None)) + for mapping in mappings + ), bulk=True) + + _emit_update_statements(base_mapper, None, + cached_connections, + super_mapper, table, records, + bookkeeping=False) + + +def save_obj( + base_mapper, states, uowtransaction, single=False): """Issue ``INSERT`` and/or ``UPDATE`` statements for a list of objects. @@ -39,32 +139,54 @@ def save_obj(base_mapper, states, uowtransaction, single=False): save_obj(base_mapper, [state], uowtransaction, single=True) return - states_to_insert, states_to_update = _organize_states_for_save( - base_mapper, - states, - uowtransaction) - + states_to_update = [] + states_to_insert = [] cached_connections = _cached_connection_dict(base_mapper) + for (state, dict_, mapper, connection, + has_identity, + row_switch, update_version_id) in _organize_states_for_save( + base_mapper, states, uowtransaction + ): + if has_identity or row_switch: + states_to_update.append( + (state, dict_, mapper, connection, update_version_id) + ) + else: + states_to_insert.append( + (state, dict_, mapper, connection) + ) + for table, mapper in base_mapper._sorted_tables.items(): - insert = _collect_insert_commands(base_mapper, uowtransaction, - table, states_to_insert) + if table not in mapper._pks_by_table: + continue + insert = _collect_insert_commands(table, states_to_insert) - update = _collect_update_commands(base_mapper, uowtransaction, - table, states_to_update) + update = _collect_update_commands( + uowtransaction, table, states_to_update) - if update: - _emit_update_statements(base_mapper, uowtransaction, - cached_connections, - mapper, table, update) + _emit_update_statements(base_mapper, uowtransaction, + cached_connections, + mapper, table, update) - if insert: - _emit_insert_statements(base_mapper, uowtransaction, - cached_connections, - mapper, table, insert) + _emit_insert_statements(base_mapper, uowtransaction, + cached_connections, + mapper, table, insert) - _finalize_insert_update_commands(base_mapper, uowtransaction, - states_to_insert, states_to_update) + _finalize_insert_update_commands( + base_mapper, uowtransaction, + chain( + ( + (state, state_dict, mapper, connection, False) + for state, state_dict, mapper, connection in states_to_insert + ), + ( + (state, state_dict, mapper, connection, True) + for state, state_dict, mapper, connection, + update_version_id in states_to_update + ) + ) + ) def post_update(base_mapper, states, uowtransaction, post_update_cols): @@ -74,19 +196,28 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols): """ cached_connections = _cached_connection_dict(base_mapper) - states_to_update = _organize_states_for_post_update( - base_mapper, - states, uowtransaction) + states_to_update = list(_organize_states_for_post_update( + base_mapper, + states, uowtransaction)) for table, mapper in 
base_mapper._sorted_tables.items(): + if table not in mapper._pks_by_table: + continue + + update = ( + (state, state_dict, sub_mapper, connection) + for + state, state_dict, sub_mapper, connection in states_to_update + if table in sub_mapper._pks_by_table + ) + update = _collect_post_update_commands(base_mapper, uowtransaction, - table, states_to_update, - post_update_cols) + table, update, + post_update_cols) - if update: - _emit_post_update_statements(base_mapper, uowtransaction, - cached_connections, - mapper, table, update) + _emit_post_update_statements(base_mapper, uowtransaction, + cached_connections, + mapper, table, update) def delete_obj(base_mapper, states, uowtransaction): @@ -99,24 +230,26 @@ def delete_obj(base_mapper, states, uowtransaction): cached_connections = _cached_connection_dict(base_mapper) - states_to_delete = _organize_states_for_delete( - base_mapper, - states, - uowtransaction) + states_to_delete = list(_organize_states_for_delete( + base_mapper, + states, + uowtransaction)) table_to_mapper = base_mapper._sorted_tables for table in reversed(list(table_to_mapper.keys())): - delete = _collect_delete_commands(base_mapper, uowtransaction, - table, states_to_delete) - mapper = table_to_mapper[table] + if table not in mapper._pks_by_table: + continue + + delete = _collect_delete_commands(base_mapper, uowtransaction, + table, states_to_delete) _emit_delete_statements(base_mapper, uowtransaction, - cached_connections, mapper, table, delete) + cached_connections, mapper, table, delete) - for state, state_dict, mapper, has_identity, connection \ - in states_to_delete: + for state, state_dict, mapper, connection, \ + update_version_id in states_to_delete: mapper.dispatch.after_delete(mapper, connection, state) @@ -132,17 +265,15 @@ def _organize_states_for_save(base_mapper, states, uowtransaction): """ - states_to_insert = [] - states_to_update = [] - for state, dict_, mapper, connection in _connections_for_states( - base_mapper, uowtransaction, - states): + base_mapper, uowtransaction, + states): has_identity = bool(state.key) + instance_key = state.key or mapper._identity_key_from_state(state) - row_switch = None + row_switch = update_version_id = None # call before_XXX extensions if not has_identity: @@ -179,22 +310,18 @@ def _organize_states_for_save(base_mapper, states, uowtransaction): uowtransaction.remove_state_actions(existing) row_switch = existing - if not has_identity and not row_switch: - states_to_insert.append( - (state, dict_, mapper, connection, - has_identity, instance_key, row_switch) - ) - else: - states_to_update.append( - (state, dict_, mapper, connection, - has_identity, instance_key, row_switch) - ) + if (has_identity or row_switch) and mapper.version_id_col is not None: + update_version_id = mapper._get_committed_state_attr_by_column( + row_switch if row_switch else state, + row_switch.dict if row_switch else dict_, + mapper.version_id_col) - return states_to_insert, states_to_update + yield (state, dict_, mapper, connection, + has_identity, row_switch, update_version_id) def _organize_states_for_post_update(base_mapper, states, - uowtransaction): + uowtransaction): """Make an initial pass across a set of states for UPDATE corresponding to post_update. @@ -203,8 +330,7 @@ def _organize_states_for_post_update(base_mapper, states, the execution per state. 
""" - return list(_connections_for_states(base_mapper, uowtransaction, - states)) + return _connections_for_states(base_mapper, uowtransaction, states) def _organize_states_for_delete(base_mapper, states, uowtransaction): @@ -215,72 +341,81 @@ def _organize_states_for_delete(base_mapper, states, uowtransaction): mapper, the connection to use for the execution per state. """ - states_to_delete = [] - for state, dict_, mapper, connection in _connections_for_states( - base_mapper, uowtransaction, - states): + base_mapper, uowtransaction, + states): mapper.dispatch.before_delete(mapper, connection, state) - states_to_delete.append((state, dict_, mapper, - bool(state.key), connection)) - return states_to_delete + if mapper.version_id_col is not None: + update_version_id = \ + mapper._get_committed_state_attr_by_column( + state, dict_, + mapper.version_id_col) + else: + update_version_id = None + + yield ( + state, dict_, mapper, connection, update_version_id) -def _collect_insert_commands(base_mapper, uowtransaction, table, - states_to_insert): +def _collect_insert_commands( + table, states_to_insert, + bulk=False, return_defaults=False): """Identify sets of values to use in INSERT statements for a list of states. """ - insert = [] - for state, state_dict, mapper, connection, has_identity, \ - instance_key, row_switch in states_to_insert: + for state, state_dict, mapper, connection in states_to_insert: if table not in mapper._pks_by_table: continue - pks = mapper._pks_by_table[table] - params = {} value_params = {} - has_all_pks = True - has_all_defaults = True - for col in mapper._cols_by_table[table]: - if col is mapper.version_id_col and \ - mapper.version_id_generator is not False: - val = mapper.version_id_generator(None) - params[col.key] = val + propkey_to_col = mapper._propkey_to_col[table] + + for propkey in set(propkey_to_col).intersection(state_dict): + value = state_dict[propkey] + col = propkey_to_col[propkey] + if value is None: + continue + elif not bulk and isinstance(value, sql.ClauseElement): + value_params[col.key] = value else: - # pull straight from the dict for - # pending objects - prop = mapper._columntoproperty[col] - value = state_dict.get(prop.key, None) - - if value is None: - if col in pks: - has_all_pks = False - elif col.default is None and \ - col.server_default is None: - params[col.key] = value - elif col.server_default is not None and \ - mapper.base_mapper.eager_defaults: - has_all_defaults = False - - elif isinstance(value, sql.ClauseElement): - value_params[col] = value - else: - params[col.key] = value + params[col.key] = value + + if not bulk: + for colkey in mapper._insert_cols_as_none[table].\ + difference(params).difference(value_params): + params[colkey] = None + + if not bulk or return_defaults: + has_all_pks = mapper._pk_keys_by_table[table].issubset(params) + + if mapper.base_mapper.eager_defaults: + has_all_defaults = mapper._server_default_cols[table].\ + issubset(params) + else: + has_all_defaults = True + else: + has_all_defaults = has_all_pks = True + + if mapper.version_id_generator is not False \ + and mapper.version_id_col is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: + params[mapper.version_id_col.key] = \ + mapper.version_id_generator(None) - insert.append((state, state_dict, params, mapper, - connection, value_params, has_all_pks, - has_all_defaults)) - return insert + yield ( + state, state_dict, params, mapper, + connection, value_params, has_all_pks, + has_all_defaults) -def 
_collect_update_commands(base_mapper, uowtransaction, - table, states_to_update): +def _collect_update_commands( + uowtransaction, table, states_to_update, + bulk=False): """Identify sets of values to use in UPDATE statements for a list of states. @@ -292,121 +427,127 @@ def _collect_update_commands(base_mapper, uowtransaction, """ - update = [] - for state, state_dict, mapper, connection, has_identity, \ - instance_key, row_switch in states_to_update: + for state, state_dict, mapper, connection, \ + update_version_id in states_to_update: + if table not in mapper._pks_by_table: continue pks = mapper._pks_by_table[table] - params = {} value_params = {} - hasdata = hasnull = False - for col in mapper._cols_by_table[table]: - if col is mapper.version_id_col: - params[col._label] = \ - mapper._get_committed_state_attr_by_column( - row_switch or state, - row_switch and row_switch.dict - or state_dict, - col) + propkey_to_col = mapper._propkey_to_col[table] - prop = mapper._columntoproperty[col] - history = attributes.get_state_history( - state, prop.key, - attributes.PASSIVE_NO_INITIALIZE - ) - if history.added: - params[col.key] = history.added[0] - hasdata = True - else: - if mapper.version_id_generator is not False: - val = mapper.version_id_generator(params[col._label]) - params[col.key] = val - - # HACK: check for history, in case the - # history is only - # in a different table than the one - # where the version_id_col is. - for prop in mapper._columntoproperty.values(): - history = attributes.get_state_history( - state, prop.key, - attributes.PASSIVE_NO_INITIALIZE) - if history.added: - hasdata = True + if bulk: + params = dict( + (propkey_to_col[propkey].key, state_dict[propkey]) + for propkey in + set(propkey_to_col).intersection(state_dict).difference( + mapper._pk_keys_by_table[table]) + ) + has_all_defaults = True + else: + params = {} + for propkey in set(propkey_to_col).intersection( + state.committed_state): + value = state_dict[propkey] + col = propkey_to_col[propkey] + + if isinstance(value, sql.ClauseElement): + value_params[col] = value + # guard against values that generate non-__nonzero__ + # objects for __eq__() + elif state.manager[propkey].impl.is_equal( + value, state.committed_state[propkey]) is not True: + params[col.key] = value + + if mapper.base_mapper.eager_defaults: + has_all_defaults = mapper._server_onupdate_default_cols[table].\ + issubset(params) else: - prop = mapper._columntoproperty[col] - history = attributes.get_state_history( - state, prop.key, - attributes.PASSIVE_NO_INITIALIZE) + has_all_defaults = True + + if update_version_id is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: + + if not bulk and not (params or value_params): + # HACK: check for history in other tables, in case the + # history is only in a different table than the one + # where the version_id_col is. This logic was lost + # from 0.9 -> 1.0.0 and restored in 1.0.6. 
+ for prop in mapper._columntoproperty.values(): + history = ( + state.manager[prop.key].impl.get_history( + state, state_dict, + attributes.PASSIVE_NO_INITIALIZE)) + if history.added: + break + else: + # no net change, break + continue + + col = mapper.version_id_col + params[col._label] = update_version_id + + if (bulk or col.key not in params) and \ + mapper.version_id_generator is not False: + val = mapper.version_id_generator(update_version_id) + params[col.key] = val + + elif not (params or value_params): + continue + + if bulk: + pk_params = dict( + (propkey_to_col[propkey]._label, state_dict.get(propkey)) + for propkey in + set(propkey_to_col). + intersection(mapper._pk_keys_by_table[table]) + ) + else: + pk_params = {} + for col in pks: + propkey = mapper._columntoproperty[col].key + + history = state.manager[propkey].impl.get_history( + state, state_dict, attributes.PASSIVE_OFF) + if history.added: - if isinstance(history.added[0], - sql.ClauseElement): - value_params[col] = history.added[0] - else: - value = history.added[0] - params[col.key] = value - - if col in pks: - if history.deleted and \ - not row_switch: - # if passive_updates and sync detected - # this was a pk->pk sync, use the new - # value to locate the row, since the - # DB would already have set this - if ("pk_cascaded", state, col) in \ - uowtransaction.attributes: - value = history.added[0] - params[col._label] = value - else: - # use the old value to - # locate the row - value = history.deleted[0] - params[col._label] = value - hasdata = True - else: - # row switch logic can reach us here - # remove the pk from the update params - # so the update doesn't - # attempt to include the pk in the - # update statement - del params[col.key] - value = history.added[0] - params[col._label] = value - if value is None: - hasnull = True + if not history.deleted or \ + ("pk_cascaded", state, col) in \ + uowtransaction.attributes: + pk_params[col._label] = history.added[0] + params.pop(col.key, None) else: - hasdata = True - elif col in pks: - value = state.manager[prop.key].impl.get( - state, state_dict) - if value is None: - hasnull = True - params[col._label] = value - if hasdata: - if hasnull: - raise orm_exc.FlushError( - "Can't update table " - "using NULL for primary " - "key value") - update.append((state, state_dict, params, mapper, - connection, value_params)) - return update + # else, use the old value to locate the row + pk_params[col._label] = history.deleted[0] + params[col.key] = history.added[0] + else: + pk_params[col._label] = history.unchanged[0] + if pk_params[col._label] is None: + raise orm_exc.FlushError( + "Can't update table %s using NULL for primary " + "key value on column %s" % (table, col)) + + if params or value_params: + params.update(pk_params) + yield ( + state, state_dict, params, mapper, + connection, value_params, has_all_defaults) def _collect_post_update_commands(base_mapper, uowtransaction, table, - states_to_update, post_update_cols): + states_to_update, post_update_cols): """Identify sets of values to use in UPDATE statements for a list of states within a post_update operation. 
""" - update = [] for state, state_dict, mapper, connection in states_to_update: - if table not in mapper._pks_by_table: - continue + + # assert table in mapper._pks_by_table + pks = mapper._pks_by_table[table] params = {} hasdata = False @@ -414,158 +555,223 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table, for col in mapper._cols_by_table[table]: if col in pks: params[col._label] = \ - mapper._get_state_attr_by_column( - state, - state_dict, col) + mapper._get_state_attr_by_column( + state, + state_dict, col, passive=attributes.PASSIVE_OFF) elif col in post_update_cols: prop = mapper._columntoproperty[col] - history = attributes.get_state_history( - state, prop.key, - attributes.PASSIVE_NO_INITIALIZE) + history = state.manager[prop.key].impl.get_history( + state, state_dict, + attributes.PASSIVE_NO_INITIALIZE) if history.added: value = history.added[0] params[col.key] = value hasdata = True if hasdata: - update.append((state, state_dict, params, mapper, - connection)) - return update + yield params, connection def _collect_delete_commands(base_mapper, uowtransaction, table, - states_to_delete): + states_to_delete): """Identify values to use in DELETE statements for a list of states to be deleted.""" - delete = util.defaultdict(list) + for state, state_dict, mapper, connection, \ + update_version_id in states_to_delete: - for state, state_dict, mapper, has_identity, connection \ - in states_to_delete: - if not has_identity or table not in mapper._pks_by_table: + if table not in mapper._pks_by_table: continue params = {} - delete[connection].append(params) for col in mapper._pks_by_table[table]: params[col.key] = \ - value = \ - mapper._get_committed_state_attr_by_column( - state, state_dict, col) + value = \ + mapper._get_committed_state_attr_by_column( + state, state_dict, col) if value is None: raise orm_exc.FlushError( - "Can't delete from table " - "using NULL for primary " - "key value") + "Can't delete from table %s " + "using NULL for primary " + "key value on column %s" % (table, col)) - if mapper.version_id_col is not None and \ - table.c.contains_column(mapper.version_id_col): - params[mapper.version_id_col.key] = \ - mapper._get_committed_state_attr_by_column( - state, state_dict, - mapper.version_id_col) - return delete + if update_version_id is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: + params[mapper.version_id_col.key] = update_version_id + yield params, connection def _emit_update_statements(base_mapper, uowtransaction, - cached_connections, mapper, table, update): + cached_connections, mapper, table, update, + bookkeeping=True): """Emit UPDATE statements corresponding to value lists collected by _collect_update_commands().""" needs_version_id = mapper.version_id_col is not None and \ - table.c.contains_column(mapper.version_id_col) + mapper.version_id_col in mapper._cols_by_table[table] def update_stmt(): clause = sql.and_() for col in mapper._pks_by_table[table]: clause.clauses.append(col == sql.bindparam(col._label, - type_=col.type)) + type_=col.type)) if needs_version_id: - clause.clauses.append(mapper.version_id_col ==\ - sql.bindparam(mapper.version_id_col._label, - type_=mapper.version_id_col.type)) + clause.clauses.append( + mapper.version_id_col == sql.bindparam( + mapper.version_id_col._label, + type_=mapper.version_id_col.type)) stmt = table.update(clause) - if mapper.base_mapper.eager_defaults: - stmt = stmt.return_defaults() + return stmt + + cached_stmt = base_mapper._memo(('update', table), update_stmt) + 
+ for (connection, paramkeys, hasvalue, has_all_defaults), \ + records in groupby( + update, + lambda rec: ( + rec[4], # connection + set(rec[2]), # set of parameter keys + bool(rec[5]), # whether or not we have "value" parameters + rec[6] # has_all_defaults + ) + ): + rows = 0 + records = list(records) + + statement = cached_stmt + + # TODO: would be super-nice to not have to determine this boolean + # inside the loop here, in the 99.9999% of the time there's only + # one connection in use + assert_singlerow = connection.dialect.supports_sane_rowcount + assert_multirow = assert_singlerow and \ + connection.dialect.supports_sane_multi_rowcount + allow_multirow = has_all_defaults and not needs_version_id + + if bookkeeping and not has_all_defaults and \ + mapper.base_mapper.eager_defaults: + statement = statement.return_defaults() elif mapper.version_id_col is not None: - stmt = stmt.return_defaults(mapper.version_id_col) + statement = statement.return_defaults(mapper.version_id_col) + + if hasvalue: + for state, state_dict, params, mapper, \ + connection, value_params, has_all_defaults in records: + c = connection.execute( + statement.values(value_params), + params) + if bookkeeping: + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + value_params) + rows += c.rowcount + check_rowcount = True + else: + if not allow_multirow: + check_rowcount = assert_singlerow + for state, state_dict, params, mapper, \ + connection, value_params, has_all_defaults in records: + c = cached_connections[connection].\ + execute(statement, params) + + # TODO: why with bookkeeping=False? + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + value_params) + rows += c.rowcount + else: + multiparams = [rec[2] for rec in records] - return stmt + check_rowcount = assert_multirow or ( + assert_singlerow and + len(multiparams) == 1 + ) - statement = base_mapper._memo(('update', table), update_stmt) + c = cached_connections[connection].\ + execute(statement, multiparams) - rows = 0 - for state, state_dict, params, mapper, \ - connection, value_params in update: + rows += c.rowcount - if value_params: - c = connection.execute( - statement.values(value_params), - params) - else: - c = cached_connections[connection].\ - execute(statement, params) - - _postfetch( - mapper, - uowtransaction, - table, - state, - state_dict, - c, - c.context.compiled_parameters[0], - value_params) - rows += c.rowcount - - if connection.dialect.supports_sane_rowcount: - if rows != len(update): - raise orm_exc.StaleDataError( + # TODO: why with bookkeeping=False? + for state, state_dict, params, mapper, \ + connection, value_params, has_all_defaults in records: + _postfetch( + mapper, + uowtransaction, + table, + state, + state_dict, + c, + c.context.compiled_parameters[0], + value_params) + + if check_rowcount: + if rows != len(records): + raise orm_exc.StaleDataError( "UPDATE statement on table '%s' expected to " "update %d row(s); %d were matched." % - (table.description, len(update), rows)) + (table.description, len(records), rows)) - elif needs_version_id: - util.warn("Dialect %s does not support updated rowcount " - "- versioning cannot be verified." % - c.dialect.dialect_description, - stacklevel=12) + elif needs_version_id: + util.warn("Dialect %s does not support updated rowcount " + "- versioning cannot be verified." 
% + c.dialect.dialect_description) def _emit_insert_statements(base_mapper, uowtransaction, - cached_connections, mapper, table, insert): + cached_connections, mapper, table, insert, + bookkeeping=True): """Emit INSERT statements corresponding to value lists collected by _collect_insert_commands().""" - statement = base_mapper._memo(('insert', table), table.insert) + cached_stmt = base_mapper._memo(('insert', table), table.insert) for (connection, pkeys, hasvalue, has_all_pks, has_all_defaults), \ - records in groupby(insert, - lambda rec: (rec[4], - list(rec[2].keys()), - bool(rec[5]), - rec[6], rec[7]) - ): - if \ - ( - has_all_defaults - or not base_mapper.eager_defaults - or not connection.dialect.implicit_returning - ) and has_all_pks and not hasvalue: + records in groupby( + insert, + lambda rec: ( + rec[4], # connection + set(rec[2]), # parameter keys + bool(rec[5]), # whether we have "value" parameters + rec[6], + rec[7])): + + statement = cached_stmt + + if not bookkeeping or \ + ( + has_all_defaults + or not base_mapper.eager_defaults + or not connection.dialect.implicit_returning + ) and has_all_pks and not hasvalue: records = list(records) multiparams = [rec[2] for rec in records] c = cached_connections[connection].\ - execute(statement, multiparams) - - for (state, state_dict, params, mapper_rec, - conn, value_params, has_all_pks, has_all_defaults), \ - last_inserted_params in \ - zip(records, c.context.compiled_parameters): - _postfetch( + execute(statement, multiparams) + + if bookkeeping: + for (state, state_dict, params, mapper_rec, + conn, value_params, has_all_pks, has_all_defaults), \ + last_inserted_params in \ + zip(records, c.context.compiled_parameters): + _postfetch( mapper_rec, uowtransaction, table, @@ -582,45 +788,39 @@ def _emit_insert_statements(base_mapper, uowtransaction, statement = statement.return_defaults(mapper.version_id_col) for state, state_dict, params, mapper_rec, \ - connection, value_params, \ - has_all_pks, has_all_defaults in records: + connection, value_params, \ + has_all_pks, has_all_defaults in records: if value_params: result = connection.execute( - statement.values(value_params), - params) + statement.values(value_params), + params) else: result = cached_connections[connection].\ - execute(statement, params) + execute(statement, params) primary_key = result.context.inserted_primary_key if primary_key is not None: # set primary key attributes for pk, col in zip(primary_key, - mapper._pks_by_table[table]): + mapper._pks_by_table[table]): prop = mapper_rec._columntoproperty[col] if state_dict.get(prop.key) is None: - # TODO: would rather say: - #state_dict[prop.key] = pk - mapper_rec._set_state_attr_by_column( - state, - state_dict, - col, pk) - + state_dict[prop.key] = pk _postfetch( - mapper_rec, - uowtransaction, - table, - state, - state_dict, - result, - result.context.compiled_parameters[0], - value_params) + mapper_rec, + uowtransaction, + table, + state, + state_dict, + result, + result.context.compiled_parameters[0], + value_params) def _emit_post_update_statements(base_mapper, uowtransaction, - cached_connections, mapper, table, update): + cached_connections, mapper, table, update): """Emit UPDATE statements corresponding to value lists collected by _collect_post_update_commands().""" @@ -629,7 +829,7 @@ def update_stmt(): for col in mapper._pks_by_table[table]: clause.clauses.append(col == sql.bindparam(col._label, - type_=col.type)) + type_=col.type)) return table.update(clause) @@ -640,42 +840,48 @@ def update_stmt(): # also 
group them into common (connection, cols) sets # to support executemany(). for key, grouper in groupby( - update, lambda rec: (rec[4], list(rec[2].keys())) + update, lambda rec: ( + rec[1], # connection + set(rec[0]) # parameter keys + ) ): connection = key[0] - multiparams = [params for state, state_dict, - params, mapper, conn in grouper] + multiparams = [params for params, conn in grouper] cached_connections[connection].\ - execute(statement, multiparams) + execute(statement, multiparams) def _emit_delete_statements(base_mapper, uowtransaction, cached_connections, - mapper, table, delete): + mapper, table, delete): """Emit DELETE statements corresponding to value lists collected by _collect_delete_commands().""" need_version_id = mapper.version_id_col is not None and \ - table.c.contains_column(mapper.version_id_col) + mapper.version_id_col in mapper._cols_by_table[table] def delete_stmt(): clause = sql.and_() for col in mapper._pks_by_table[table]: clause.clauses.append( - col == sql.bindparam(col.key, type_=col.type)) + col == sql.bindparam(col.key, type_=col.type)) if need_version_id: clause.clauses.append( mapper.version_id_col == sql.bindparam( - mapper.version_id_col.key, - type_=mapper.version_id_col.type + mapper.version_id_col.key, + type_=mapper.version_id_col.type ) ) return table.delete(clause) - for connection, del_objects in delete.items(): - statement = base_mapper._memo(('delete', table), delete_stmt) + statement = base_mapper._memo(('delete', table), delete_stmt) + for connection, recs in groupby( + delete, + lambda rec: rec[1] # connection + ): + del_objects = [params for params, connection in recs] connection = cached_connections[connection] @@ -709,7 +915,7 @@ def delete_stmt(): connection.execute(statement, del_objects) if base_mapper.confirm_deleted_rows and \ - rows_matched > -1 and expected != rows_matched: + rows_matched > -1 and expected != rows_matched: if only_warn: util.warn( "DELETE statement on table '%s' expected to " @@ -727,15 +933,13 @@ def delete_stmt(): (table.description, expected, rows_matched) ) -def _finalize_insert_update_commands(base_mapper, uowtransaction, - states_to_insert, states_to_update): + +def _finalize_insert_update_commands(base_mapper, uowtransaction, states): """finalize state on states that have been inserted or updated, including calling after_insert/after_update events. 
""" - for state, state_dict, mapper, connection, has_identity, \ - instance_key, row_switch in states_to_insert + \ - states_to_update: + for state, state_dict, mapper, connection, has_identity in states: if mapper._readonly_props: readonly = state.unmodified_intersection( @@ -753,10 +957,9 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, if base_mapper.eager_defaults: toload_now.extend(state._unloaded_non_object) elif mapper.version_id_col is not None and \ - mapper.version_id_generator is False: - prop = mapper._columntoproperty[mapper.version_id_col] - if prop.key in state.unloaded: - toload_now.extend([prop.key]) + mapper.version_id_generator is False: + if mapper._version_id_prop.key in state.unloaded: + toload_now.extend([mapper._version_id_prop.key]) if toload_now: state.key = base_mapper._identity_key_from_state(state) @@ -773,45 +976,63 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, def _postfetch(mapper, uowtransaction, table, - state, dict_, result, params, value_params): + state, dict_, result, params, value_params, bulk=False): """Expire attributes in need of newly persisted database state, after an INSERT or UPDATE statement has proceeded for that state.""" - prefetch_cols = result.context.prefetch_cols - postfetch_cols = result.context.postfetch_cols - returning_cols = result.context.returning_cols + # TODO: bulk is never non-False, need to clean this up - if mapper.version_id_col is not None: + prefetch_cols = result.context.compiled.prefetch + postfetch_cols = result.context.compiled.postfetch + returning_cols = result.context.compiled.returning + + if mapper.version_id_col is not None and \ + mapper.version_id_col in mapper._cols_by_table[table]: prefetch_cols = list(prefetch_cols) + [mapper.version_id_col] + refresh_flush = bool(mapper.class_manager.dispatch.refresh_flush) + if refresh_flush: + load_evt_attrs = [] + if returning_cols: row = result.context.returned_defaults if row is not None: for col in returning_cols: if col.primary_key: continue - mapper._set_state_attr_by_column(state, dict_, col, row[col]) + dict_[mapper._columntoproperty[col].key] = row[col] + if refresh_flush: + load_evt_attrs.append(mapper._columntoproperty[col].key) for c in prefetch_cols: if c.key in params and c in mapper._columntoproperty: - mapper._set_state_attr_by_column(state, dict_, c, params[c.key]) + dict_[mapper._columntoproperty[c].key] = params[c.key] + if refresh_flush: + load_evt_attrs.append(mapper._columntoproperty[c].key) + + if refresh_flush and load_evt_attrs: + mapper.class_manager.dispatch.refresh_flush( + state, uowtransaction, load_evt_attrs) - if postfetch_cols: + if postfetch_cols and state: state._expire_attributes(state.dict, - [mapper._columntoproperty[c].key - for c in postfetch_cols if c in - mapper._columntoproperty] - ) + [mapper._columntoproperty[c].key + for c in postfetch_cols if c in + mapper._columntoproperty] + ) # synchronize newly inserted ids from one table to the next # TODO: this still goes a little too often. 
would be nice to # have definitive list of "columns that changed" here for m, equated_pairs in mapper._table_to_equated[table]: - sync.populate(state, m, state, m, - equated_pairs, - uowtransaction, - mapper.passive_updates) + if state is None: + sync.bulk_populate_inherit_keys(dict_, m, equated_pairs) + else: + sync.populate(state, m, state, m, + equated_pairs, + uowtransaction, + mapper.passive_updates) def _connections_for_states(base_mapper, uowtransaction, states): @@ -827,19 +1048,16 @@ def _connections_for_states(base_mapper, uowtransaction, states): # to use for update if uowtransaction.session.connection_callable: connection_callable = \ - uowtransaction.session.connection_callable + uowtransaction.session.connection_callable else: - connection = None + connection = uowtransaction.transaction.connection(base_mapper) connection_callable = None for state in _sort_states(states): if connection_callable: connection = connection_callable(base_mapper, state.obj()) - elif not connection: - connection = uowtransaction.transaction.connection( - base_mapper) - mapper = _state_mapper(state) + mapper = state.manager.mapper yield state, state.dict, mapper, connection @@ -848,8 +1066,8 @@ def _cached_connection_dict(base_mapper): # dictionary of connection->connection_with_cache_options. return util.PopulateDict( lambda conn: conn.execution_options( - compiled_cache=base_mapper._compiled_cache - )) + compiled_cache=base_mapper._compiled_cache + )) def _sort_states(states): @@ -857,7 +1075,7 @@ def _sort_states(states): persistent = set(s for s in pending if s.key is not None) pending.difference_update(persistent) return sorted(pending, key=operator.attrgetter("insert_order")) + \ - sorted(persistent, key=lambda q: q.key[1]) + sorted(persistent, key=lambda q: q.key[1]) class BulkUD(object): @@ -865,6 +1083,27 @@ class BulkUD(object): def __init__(self, query): self.query = query.enable_eagerloads(False) + self.mapper = self.query._bind_mapper() + self._validate_query_state() + + def _validate_query_state(self): + for attr, methname, notset, op in ( + ('_limit', 'limit()', None, operator.is_), + ('_offset', 'offset()', None, operator.is_), + ('_order_by', 'order_by()', False, operator.is_), + ('_group_by', 'group_by()', False, operator.is_), + ('_distinct', 'distinct()', False, operator.is_), + ( + '_from_obj', + 'join(), outerjoin(), select_from(), or from_self()', + (), operator.eq) + ): + if not op(getattr(self.query, attr), notset): + raise sa_exc.InvalidRequestError( + "Can't call Query.update() or Query.delete() " + "when %s has been called" % + (methname, ) + ) @property def session(self): @@ -876,9 +1115,9 @@ def _factory(cls, lookup, synchronize_session, *arg): klass = lookup[synchronize_session] except KeyError: raise sa_exc.ArgumentError( - "Valid strategies for session synchronization " - "are %s" % (", ".join(sorted(repr(x) - for x in lookup)))) + "Valid strategies for session synchronization " + "are %s" % (", ".join(sorted(repr(x) + for x in lookup)))) else: return klass(*arg) @@ -889,18 +1128,34 @@ def exec_(self): self._do_post_synchronize() self._do_post() - def _do_pre(self): + @util.dependencies("sqlalchemy.orm.query") + def _do_pre(self, querylib): query = self.query - self.context = context = query._compile_context() - if len(context.statement.froms) != 1 or \ - not isinstance(context.statement.froms[0], schema.Table): + self.context = querylib.QueryContext(query) + + if isinstance(query._entities[0], querylib._ColumnEntity): + # check for special case of query(table) + 
tables = set() + for ent in query._entities: + if not isinstance(ent, querylib._ColumnEntity): + tables.clear() + break + else: + tables.update(_from_objects(ent.column)) - self.primary_table = query._only_entity_zero( + if len(tables) != 1: + raise sa_exc.InvalidRequestError( "This operation requires only one Table or " "entity be specified as the target." - ).mapper.local_table + ) + else: + self.primary_table = tables.pop() + else: - self.primary_table = context.statement.froms[0] + self.primary_table = query._only_entity_zero( + "This operation requires only one Table or " + "entity be specified as the target." + ).mapper.local_table session = query.session @@ -922,11 +1177,13 @@ def _additional_evaluators(self, evaluator_compiler): def _do_pre_synchronize(self): query = self.query + target_cls = query._mapper_zero().class_ + try: - evaluator_compiler = evaluator.EvaluatorCompiler() + evaluator_compiler = evaluator.EvaluatorCompiler(target_cls) if query.whereclause is not None: eval_condition = evaluator_compiler.process( - query.whereclause) + query.whereclause) else: def eval_condition(obj): return True @@ -935,17 +1192,16 @@ def eval_condition(obj): except evaluator.UnevaluatableError: raise sa_exc.InvalidRequestError( - "Could not evaluate current criteria in Python. " - "Specify 'fetch' or False for the " - "synchronize_session parameter.") - target_cls = query._mapper_zero().class_ + "Could not evaluate current criteria in Python. " + "Specify 'fetch' or False for the " + "synchronize_session parameter.") - #TODO: detect when the where clause is a trivial primary key match + # TODO: detect when the where clause is a trivial primary key match self.matched_objects = [ - obj for (cls, pk), obj in - query.session.identity_map.items() - if issubclass(cls, target_cls) and - eval_condition(obj)] + obj for (cls, pk), obj in + query.session.identity_map.items() + if issubclass(cls, target_cls) and + eval_condition(obj)] class BulkFetch(BulkUD): @@ -954,35 +1210,76 @@ class BulkFetch(BulkUD): def _do_pre_synchronize(self): query = self.query session = query.session - select_stmt = self.context.statement.with_only_columns( - self.primary_table.primary_key) + context = query._compile_context() + select_stmt = context.statement.with_only_columns( + self.primary_table.primary_key) self.matched_rows = session.execute( - select_stmt, - params=query._params).fetchall() + select_stmt, + mapper=self.mapper, + params=query._params).fetchall() class BulkUpdate(BulkUD): """BulkUD which handles UPDATEs.""" - def __init__(self, query, values): + def __init__(self, query, values, update_kwargs): super(BulkUpdate, self).__init__(query) - self.query._no_select_modifiers("update") self.values = values + self.update_kwargs = update_kwargs @classmethod - def factory(cls, query, synchronize_session, values): + def factory(cls, query, synchronize_session, values, update_kwargs): return BulkUD._factory({ "evaluate": BulkUpdateEvaluate, "fetch": BulkUpdateFetch, False: BulkUpdate - }, synchronize_session, query, values) + }, synchronize_session, query, values, update_kwargs) + + def _resolve_string_to_expr(self, key): + if self.mapper and isinstance(key, util.string_types): + attr = _entity_descriptor(self.mapper, key) + return attr.__clause_element__() + else: + return key + + def _resolve_key_to_attrname(self, key): + if self.mapper and isinstance(key, util.string_types): + attr = _entity_descriptor(self.mapper, key) + return attr.property.key + elif isinstance(key, attributes.InstrumentedAttribute): + return 
key.key + elif hasattr(key, '__clause_element__'): + key = key.__clause_element__() + + if self.mapper and isinstance(key, expression.ColumnElement): + try: + attr = self.mapper._columntoproperty[key] + except orm_exc.UnmappedColumnError: + return None + else: + return attr.key + else: + raise sa_exc.InvalidRequestError( + "Invalid expression type: %r" % key) def _do_exec(self): + + values = [ + (self._resolve_string_to_expr(k), v) + for k, v in ( + self.values.items() if hasattr(self.values, 'items') + else self.values) + ] + if not self.update_kwargs.get('preserve_parameter_order', False): + values = dict(values) + update_stmt = sql.update(self.primary_table, - self.context.whereclause, self.values) + self.context.whereclause, values, + **self.update_kwargs) self.result = self.query.session.execute( - update_stmt, params=self.query._params) + update_stmt, params=self.query._params, + mapper=self.mapper) self.rowcount = self.result.rowcount def _do_post(self): @@ -995,7 +1292,6 @@ class BulkDelete(BulkUD): def __init__(self, query): super(BulkDelete, self).__init__(query) - self.query._no_select_modifiers("delete") @classmethod def factory(cls, query, synchronize_session): @@ -1007,10 +1303,12 @@ def factory(cls, query, synchronize_session): def _do_exec(self): delete_stmt = sql.delete(self.primary_table, - self.context.whereclause) + self.context.whereclause) - self.result = self.query.session.execute(delete_stmt, - params=self.query._params) + self.result = self.query.session.execute( + delete_stmt, + params=self.query._params, + mapper=self.mapper) self.rowcount = self.result.rowcount def _do_post(self): @@ -1024,10 +1322,13 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate): def _additional_evaluators(self, evaluator_compiler): self.value_evaluators = {} - for key, value in self.values.items(): - key = _attr_as_key(key) - self.value_evaluators[key] = evaluator_compiler.process( - expression._literal_as_binds(value)) + values = (self.values.items() if hasattr(self.values, 'items') + else self.values) + for key, value in values: + key = self._resolve_key_to_attrname(key) + if key is not None: + self.value_evaluators[key] = evaluator_compiler.process( + expression._literal_as_binds(value)) def _do_post_synchronize(self): session = self.query.session @@ -1035,11 +1336,11 @@ def _do_post_synchronize(self): evaluated_keys = list(self.value_evaluators.keys()) for obj in self.matched_objects: state, dict_ = attributes.instance_state(obj),\ - attributes.instance_dict(obj) + attributes.instance_dict(obj) # only evaluate unmodified attributes to_evaluate = state.unmodified.intersection( - evaluated_keys) + evaluated_keys) for key in to_evaluate: dict_[key] = self.value_evaluators[key](obj) @@ -1048,8 +1349,8 @@ def _do_post_synchronize(self): # expire attributes with pending changes # (there was no autoflush, so they are overwritten) state._expire_attributes(dict_, - set(evaluated_keys). - difference(to_evaluate)) + set(evaluated_keys). 
+ difference(to_evaluate)) states.add(state) session._register_altered(states) @@ -1060,8 +1361,8 @@ class BulkDeleteEvaluate(BulkEvaluate, BulkDelete): def _do_post_synchronize(self): self.query.session._remove_newly_deleted( - [attributes.instance_state(obj) - for obj in self.matched_objects]) + [attributes.instance_state(obj) + for obj in self.matched_objects]) class BulkUpdateFetch(BulkFetch, BulkUpdate): @@ -1076,7 +1377,7 @@ def _do_post_synchronize(self): attributes.instance_state(session.identity_map[identity_key]) for identity_key in [ target_mapper.identity_key_from_primary_key( - list(primary_key)) + list(primary_key)) for primary_key in self.matched_rows ] if identity_key in session.identity_map @@ -1098,7 +1399,7 @@ def _do_post_synchronize(self): # TODO: inline this and call remove_newly_deleted # once identity_key = target_mapper.identity_key_from_primary_key( - list(primary_key)) + list(primary_key)) if identity_key in session.identity_map: session._remove_newly_deleted( [attributes.instance_state( diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index a0def7d31b..f8a353216d 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -1,5 +1,6 @@ # orm/properties.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -33,17 +34,24 @@ class ColumnProperty(StrategizedProperty): strategy_wildcard_key = 'column' + __slots__ = ( + '_orig_columns', 'columns', 'group', 'deferred', + 'instrument', 'comparator_factory', 'descriptor', 'extension', + 'active_history', 'expire_on_flush', 'info', 'doc', + 'strategy_class', '_creation_order', '_is_polymorphic_discriminator', + '_mapped_by_synonym', '_deferred_column_loader') + def __init__(self, *columns, **kwargs): """Provide a column-level property for use with a Mapper. Column-based properties can normally be applied to the mapper's ``properties`` dictionary using the :class:`.Column` element directly. - Use this function when the given column is not directly present within the - mapper's selectable; examples include SQL expressions, functions, and - scalar SELECT queries. + Use this function when the given column is not directly present within + the mapper's selectable; examples include SQL expressions, functions, + and scalar SELECT queries. - Columns that aren't present in the mapper's selectable won't be persisted - by the mapper and are effectively "read-only" attributes. + Columns that aren't present in the mapper's selectable won't be + persisted by the mapper and are effectively "read-only" attributes. :param \*cols: list of Column objects to be mapped. @@ -62,8 +70,8 @@ def __init__(self, *columns, **kwargs): .. versionadded:: 0.6.6 :param comparator_factory: a class which extends - :class:`.ColumnProperty.Comparator` which provides custom SQL clause - generation for comparison operations. + :class:`.ColumnProperty.Comparator` which provides custom SQL + clause generation for comparison operations. :param group: a group name for this property when marked as deferred. @@ -108,14 +116,15 @@ def __init__(self, *columns, **kwargs): **Deprecated.** Please see :class:`.AttributeEvents`. 
""" + super(ColumnProperty, self).__init__() self._orig_columns = [expression._labeled(c) for c in columns] self.columns = [expression._labeled(_orm_full_deannotate(c)) - for c in columns] + for c in columns] self.group = kwargs.pop('group', None) self.deferred = kwargs.pop('deferred', False) self.instrument = kwargs.pop('_instrument', True) self.comparator_factory = kwargs.pop('comparator_factory', - self.__class__.Comparator) + self.__class__.Comparator) self.descriptor = kwargs.pop('descriptor', None) self.extension = kwargs.pop('extension', None) self.active_history = kwargs.pop('active_history', False) @@ -144,9 +153,15 @@ def __init__(self, *columns, **kwargs): util.set_creation_order(self) self.strategy_class = self._strategy_lookup( - ("deferred", self.deferred), - ("instrument", self.instrument) - ) + ("deferred", self.deferred), + ("instrument", self.instrument) + ) + + @util.dependencies("sqlalchemy.orm.state", "sqlalchemy.orm.strategies") + def _memoized_attr__deferred_column_loader(self, state, strategies): + return state.InstanceState._instance_level_callable_processor( + self.parent.class_manager, + strategies.LoadDeferredColumns(self.key), self.key) @property def expression(self): @@ -165,7 +180,7 @@ def instrument_class(self, mapper): comparator=self.comparator_factory(self, mapper), parententity=mapper, doc=self.doc - ) + ) def do_init(self): super(ColumnProperty, self).do_init() @@ -180,18 +195,18 @@ def do_init(self): def copy(self): return ColumnProperty( - deferred=self.deferred, - group=self.group, - active_history=self.active_history, - *self.columns) + deferred=self.deferred, + group=self.group, + active_history=self.active_history, + *self.columns) def _getcommitted(self, state, dict_, column, - passive=attributes.PASSIVE_OFF): + passive=attributes.PASSIVE_OFF): return state.get_impl(self.key).\ - get_committed_value(state, dict_, passive=passive) + get_committed_value(state, dict_, passive=passive) def merge(self, session, source_state, source_dict, dest_state, - dest_dict, load, _recursive): + dest_dict, load, _recursive): if not self.instrument: return elif self.key in source_dict: @@ -205,7 +220,7 @@ def merge(self, session, source_state, source_dict, dest_state, elif dest_state.has_identity and self.key not in dest_dict: dest_state._expire_attributes(dest_dict, [self.key]) - class Comparator(PropComparator): + class Comparator(util.MemoizedSlots, PropComparator): """Produce boolean, comparison, and other operators for :class:`.ColumnProperty` attributes. @@ -223,24 +238,27 @@ class Comparator(PropComparator): :attr:`.TypeEngine.comparator_factory` """ - @util.memoized_instancemethod - def __clause_element__(self): + + __slots__ = '__clause_element__', 'info' + + def _memoized_method___clause_element__(self): if self.adapter: return self.adapter(self.prop.columns[0]) else: + # no adapter, so we aren't aliased + # assert self._parententity is self._parentmapper return self.prop.columns[0]._annotate({ - "parententity": self._parentmapper, - "parentmapper": self._parentmapper}) + "parententity": self._parententity, + "parentmapper": self._parententity}) - @util.memoized_property - def info(self): + def _memoized_attr_info(self): ce = self.__clause_element__() try: return ce.info except AttributeError: return self.prop.info - def __getattr__(self, key): + def _fallback_getattr(self, key): """proxy attribute access down to the mapped column. this allows user-defined comparison methods to be accessed. 
@@ -256,4 +274,3 @@ def reverse_operate(self, op, other, **kwargs): def __str__(self): return str(self.parent.class_.__name__) + "." + self.key - diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index afcbf3500c..b5151c1096 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -1,5 +1,6 @@ # orm/query.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -23,19 +24,19 @@ from . import ( attributes, interfaces, object_mapper, persistence, exc as orm_exc, loading - ) +) from .base import _entity_descriptor, _is_aliased_class, \ - _is_mapped_class, _orm_columns, _generative + _is_mapped_class, _orm_columns, _generative, InspectionAttr from .path_registry import PathRegistry from .util import ( AliasedClass, ORMAdapter, join as orm_join, with_parent, aliased - ) +) from .. import sql, util, log, exc as sa_exc, inspect, inspection from ..sql.expression import _interpret_as_from from ..sql import ( - util as sql_util, - expression, visitors - ) + util as sql_util, + expression, visitors +) from ..sql.base import ColumnCollection from . import properties @@ -44,6 +45,7 @@ _path_registry = PathRegistry.root + @inspection._self_inspects @log.class_logger class Query(object): @@ -73,6 +75,7 @@ class Query(object): _having = None _distinct = False _prefixes = None + _suffixes = None _offset = None _limit = None _for_update_arg = None @@ -97,7 +100,8 @@ class Query(object): _with_options = () _with_hints = () _enable_single_crit = True - + _orm_only_adapt = True + _orm_only_from_obj_alias = True _current_path = _path_registry def __init__(self, entities, session=None): @@ -123,22 +127,19 @@ def _set_entity_selectables(self, entities): if entity not in d: ext_info = inspect(entity) if not ext_info.is_aliased_class and \ - ext_info.mapper.with_polymorphic: + ext_info.mapper.with_polymorphic: if ext_info.mapper.mapped_table not in \ - self._polymorphic_adapters: + self._polymorphic_adapters: self._mapper_loads_polymorphically_with( ext_info.mapper, sql_util.ColumnAdapter( - ext_info.selectable, - ext_info.mapper._equivalent_columns + ext_info.selectable, + ext_info.mapper._equivalent_columns ) ) aliased_adapter = None elif ext_info.is_aliased_class: - aliased_adapter = sql_util.ColumnAdapter( - ext_info.selectable, - ext_info.mapper._equivalent_columns - ) + aliased_adapter = ext_info._adapter else: aliased_adapter = None @@ -160,18 +161,18 @@ def _set_select_from(self, obj, set_base_alias): for from_obj in obj: info = inspect(from_obj) - if hasattr(info, 'mapper') and \ - (info.is_mapper or info.is_aliased_class): + (info.is_mapper or info.is_aliased_class): + self._select_from_entity = from_obj if set_base_alias: raise sa_exc.ArgumentError( - "A selectable (FromClause) instance is " - "expected when the base alias is being set.") + "A selectable (FromClause) instance is " + "expected when the base alias is being set.") fa.append(info.selectable) elif not info.is_selectable: raise sa_exc.ArgumentError( - "argument is not a mapped class, mapper, " - "aliased(), or FromClause instance.") + "argument is not a mapped class, mapper, " + "aliased(), or FromClause instance.") else: if isinstance(from_obj, expression.SelectBase): from_obj = from_obj.alias() @@ -182,11 +183,11 @@ def _set_select_from(self, obj, set_base_alias): self._from_obj = 
tuple(fa) if set_base_alias and \ - len(self._from_obj) == 1 and \ - isinstance(select_from_alias, expression.Alias): + len(self._from_obj) == 1 and \ + isinstance(select_from_alias, expression.Alias): equivs = self.__all_equivs() self._from_obj_alias = sql_util.ColumnAdapter( - self._from_obj[0], equivs) + self._from_obj[0], equivs) def _reset_polymorphic_adapter(self, mapper): for m2 in mapper._with_polymorphic_mappers: @@ -214,11 +215,11 @@ def _adapt_polymorphic_element(self, element): def _adapt_col_list(self, cols): return [ - self._adapt_clause( - expression._literal_as_text(o), - True, True) - for o in cols - ] + self._adapt_clause( + expression._literal_as_label_reference(o), + True, True) + for o in cols + ] @_generative() def _adapt_all_clauses(self): @@ -231,7 +232,8 @@ def _adapt_clause(self, clause, as_filter, orm_only): adapters = [] # do we adapt all expression elements or only those # tagged as 'ORM' constructs ? - orm_only = getattr(self, '_orm_only_adapt', orm_only) + if not self._orm_only_adapt: + orm_only = False if as_filter and self._filter_aliases: for fa in self._filter_aliases._visitor_iterator: @@ -248,7 +250,7 @@ def _adapt_clause(self, clause, as_filter, orm_only): # to all SQL constructs. adapters.append( ( - getattr(self, '_orm_only_from_obj_alias', orm_only), + orm_only if self._orm_only_from_obj_alias else False, self._from_obj_alias.replace ) ) @@ -268,25 +270,28 @@ def replace(elem): # if 'orm only', look for ORM annotations # in the element before adapting. if not _orm_only or \ - '_orm_adapt' in elem._annotations or \ - "parententity" in elem._annotations: + '_orm_adapt' in elem._annotations or \ + "parententity" in elem._annotations: e = adapter(elem) if e is not None: return e return visitors.replacement_traverse( - clause, - {}, - replace - ) + clause, + {}, + replace + ) def _entity_zero(self): return self._entities[0] def _mapper_zero(self): - return self._select_from_entity or \ - self._entity_zero().entity_zero + # TODO: self._select_from_entity is not a mapper + # so this method is misnamed + return self._select_from_entity \ + if self._select_from_entity is not None \ + else self._entity_zero().entity_zero @property def _mapper_entities(self): @@ -300,35 +305,38 @@ def _joinpoint_zero(self): self._mapper_zero() ) - def _mapper_zero_or_none(self): - if self._primary_entity: - return self._primary_entity.mapper - else: - return None + def _bind_mapper(self): + ezero = self._mapper_zero() + if ezero is not None: + insp = inspect(ezero) + if not insp.is_clause_element: + return insp.mapper + + return None def _only_mapper_zero(self, rationale=None): if len(self._entities) > 1: raise sa_exc.InvalidRequestError( - rationale or - "This operation requires a Query " - "against a single mapper." - ) + rationale or + "This operation requires a Query " + "against a single mapper." + ) return self._mapper_zero() def _only_full_mapper_zero(self, methname): if self._entities != [self._primary_entity]: raise sa_exc.InvalidRequestError( - "%s() can only be used against " - "a single mapped class." % methname) + "%s() can only be used against " + "a single mapped class." % methname) return self._primary_entity.entity_zero def _only_entity_zero(self, rationale=None): if len(self._entities) > 1: raise sa_exc.InvalidRequestError( - rationale or - "This operation requires a Query " - "against a single mapper." - ) + rationale or + "This operation requires a Query " + "against a single mapper." 
+ ) return self._entity_zero() def __all_equivs(self): @@ -338,7 +346,8 @@ def __all_equivs(self): return equivs def _get_condition(self): - return self._no_criterion_condition("get", order_by=False, distinct=False) + return self._no_criterion_condition( + "get", order_by=False, distinct=False) def _get_existing_condition(self): self._no_criterion_assertion("get", order_by=False, distinct=False) @@ -352,8 +361,8 @@ def _no_criterion_assertion(self, meth, order_by=True, distinct=True): self._group_by or (order_by and self._order_by) or \ (distinct and self._distinct): raise sa_exc.InvalidRequestError( - "Query.%s() being called on a " - "Query with existing criterion. " % meth) + "Query.%s() being called on a " + "Query with existing criterion. " % meth) def _no_criterion_condition(self, meth, order_by=True, distinct=True): self._no_criterion_assertion(meth, order_by, distinct) @@ -367,8 +376,8 @@ def _no_clauseelement_condition(self, meth): return if self._order_by: raise sa_exc.InvalidRequestError( - "Query.%s() being called on a " - "Query with existing criterion. " % meth) + "Query.%s() being called on a " + "Query with existing criterion. " % meth) self._no_criterion_condition(meth) def _no_statement_condition(self, meth): @@ -392,26 +401,10 @@ def _no_limit_offset(self, meth): % (meth, meth) ) - def _no_select_modifiers(self, meth): - if not self._enable_assertions: - return - for attr, methname, notset in ( - ('_limit', 'limit()', None), - ('_offset', 'offset()', None), - ('_order_by', 'order_by()', False), - ('_group_by', 'group_by()', False), - ('_distinct', 'distinct()', False), - ): - if getattr(self, attr) is not notset: - raise sa_exc.InvalidRequestError( - "Can't call Query.%s() when %s has been called" % - (meth, methname) - ) - def _get_options(self, populate_existing=None, - version_check=None, - only_load_props=None, - refresh_state=None): + version_check=None, + only_load_props=None, + refresh_state=None): if populate_existing: self._populate_existing = populate_existing if version_check: @@ -439,11 +432,10 @@ def statement(self): """ stmt = self._compile_context(labels=self._with_labels).\ - statement + statement if self._params: stmt = stmt.params(self._params) - # TODO: there's no tests covering effects of # the annotation not being there return stmt._annotate({'no_replacement_traverse': True}) @@ -519,8 +511,8 @@ class Part(Base): parts_alias = aliased(Part, name="p") included_parts = included_parts.union_all( session.query( - parts_alias.part, parts_alias.sub_part, + parts_alias.part, parts_alias.quantity).\\ filter(parts_alias.part==incl_alias.c.sub_part) ) @@ -592,11 +584,19 @@ def enable_eagerloads(self, value): This is used primarily when nesting the Query's statement into a subquery or other - selectable. + selectable, or when using :meth:`.Query.yield_per`. """ self._enable_eagerloads = value + def _no_yield_per(self, message): + raise sa_exc.InvalidRequestError( + "The yield_per Query option is currently not " + "compatible with %s eager loading. Please " + "specify lazyload('*') or query.enable_eagerloads(False) in " + "order to " + "proceed with query.yield_per()." % message) + @_generative() def with_labels(self): """Apply column labels to the return value of Query.statement. @@ -610,6 +610,16 @@ def with_labels(self): When the `Query` actually issues SQL to load rows, it always uses column labeling. + .. 
note:: The :meth:`.Query.with_labels` method *only* applies + the output of :attr:`.Query.statement`, and *not* to any of + the result-row invoking systems of :class:`.Query` itself, e.g. + :meth:`.Query.first`, :meth:`.Query.all`, etc. To execute + a query using :meth:`.Query.with_labels`, invoke the + :attr:`.Query.statement` using :meth:`.Session.execute`:: + + result = session.execute(query.with_labels().statement) + + """ self._with_labels = True @@ -660,9 +670,9 @@ def _with_current_path(self, path): @_generative(_no_clauseelement_condition) def with_polymorphic(self, - cls_or_mappers, - selectable=None, - polymorphic_on=None): + cls_or_mappers, + selectable=None, + polymorphic_on=None): """Load columns for inheriting classes. :meth:`.Query.with_polymorphic` applies transformations @@ -690,40 +700,76 @@ def with_polymorphic(self, if not self._primary_entity: raise sa_exc.InvalidRequestError( - "No primary mapper set up for this Query.") + "No primary mapper set up for this Query.") entity = self._entities[0]._clone() self._entities = [entity] + self._entities[1:] entity.set_with_polymorphic(self, - cls_or_mappers, - selectable=selectable, - polymorphic_on=polymorphic_on) + cls_or_mappers, + selectable=selectable, + polymorphic_on=polymorphic_on) @_generative() def yield_per(self, count): """Yield only ``count`` rows at a time. - WARNING: use this method with caution; if the same instance is present - in more than one batch of rows, end-user changes to attributes will be - overwritten. + The purpose of this method is when fetching very large result sets + (> 10K rows), to batch results in sub-collections and yield them + out partially, so that the Python interpreter doesn't need to declare + very large areas of memory which is both time consuming and leads + to excessive memory use. The performance from fetching hundreds of + thousands of rows can often double when a suitable yield-per setting + (e.g. approximately 1000) is used, even with DBAPIs that buffer + rows (which are most). + + The :meth:`.Query.yield_per` method **is not compatible with most + eager loading schemes, including subqueryload and joinedload with + collections**. For this reason, it may be helpful to disable + eager loads, either unconditionally with + :meth:`.Query.enable_eagerloads`:: + + q = sess.query(Object).yield_per(100).enable_eagerloads(False) + + Or more selectively using :func:`.lazyload`; such as with + an asterisk to specify the default loader scheme:: + + q = sess.query(Object).yield_per(100).\\ + options(lazyload('*'), joinedload(Object.some_related)) + + .. warning:: + + Use this method with caution; if the same instance is + present in more than one batch of rows, end-user changes + to attributes will be overwritten. + + In particular, it's usually impossible to use this setting + with eagerly loaded collections (i.e. any lazy='joined' or + 'subquery') since those collections will be cleared for a + new load when encountered in a subsequent result batch. + In the case of 'subquery' loading, the full result for all + rows is fetched which generally defeats the purpose of + :meth:`~sqlalchemy.orm.query.Query.yield_per`. + + Also note that while + :meth:`~sqlalchemy.orm.query.Query.yield_per` will set the + ``stream_results`` execution option to True, currently + this is only understood by + :mod:`~sqlalchemy.dialects.postgresql.psycopg2` dialect + which will stream results using server side cursors + instead of pre-buffer all rows for this query. 
Other + DBAPIs **pre-buffer all rows** before making them + available. The memory use of raw database rows is much less + than that of an ORM-mapped object, but should still be taken into + consideration when benchmarking. - In particular, it's usually impossible to use this setting with - eagerly loaded collections (i.e. any lazy='joined' or 'subquery') - since those collections will be cleared for a new load when - encountered in a subsequent result batch. In the case of 'subquery' - loading, the full result for all rows is fetched which generally - defeats the purpose of :meth:`~sqlalchemy.orm.query.Query.yield_per`. + .. seealso:: - Also note that while :meth:`~sqlalchemy.orm.query.Query.yield_per` - will set the ``stream_results`` execution option to True, currently - this is only understood by :mod:`~sqlalchemy.dialects.postgresql.psycopg2` dialect - which will stream results using server side cursors instead of pre-buffer - all rows for this query. Other DBAPIs pre-buffer all rows before - making them available. + :meth:`.Query.enable_eagerloads` """ self._yield_per = count self._execution_options = self._execution_options.union( - {"stream_results": True}) + {"stream_results": True, + "max_row_buffer": count}) def get(self, ident): """Return an instance based on the given primary key identifier, @@ -767,7 +813,7 @@ def get(self, ident): foreign-key-to-primary-key criterion, will also use an operation equivalent to :meth:`~.Query.get` in order to retrieve the target value from the local identity map - before querying the database. See :doc:`/orm/loading` + before querying the database. See :doc:`/orm/loading_relationships` for further details on relationship loading. :param ident: A scalar or tuple value representing @@ -782,7 +828,9 @@ def get(self, ident): :return: The object instance, or ``None``. 
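As an illustrative aside, not part of this patch: a minimal, self-contained sketch of the streaming pattern that the reworked yield_per() documentation above describes, assuming a throwaway single-table User mapping and an in-memory SQLite database::

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, lazyload

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    session = Session(engine)
    session.add_all([User(name='user%d' % i) for i in range(5000)])
    session.commit()

    # Fetch in batches of 1000 rows; lazyload('*') turns off eager
    # loaders, which yield_per is not compatible with.
    for user in session.query(User).options(lazyload('*')).yield_per(1000):
        pass  # each User is processed without buffering the full result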
""" + return self._get_impl(ident, loading.load_on_ident) + def _get_impl(self, ident, fallback_fn): # convert composite types to individual args if hasattr(ident, '__composite_values__'): ident = ident.__composite_values__() @@ -793,9 +841,9 @@ def get(self, ident): if len(ident) != len(mapper.primary_key): raise sa_exc.InvalidRequestError( - "Incorrect number of values in identifier to formulate " - "primary key for query.get(); primary key columns are %s" % - ','.join("'%s'" % c for c in mapper.primary_key)) + "Incorrect number of values in identifier to formulate " + "primary key for query.get(); primary key columns are %s" % + ','.join("'%s'" % c for c in mapper.primary_key)) key = mapper.identity_key_from_primary_key(ident) @@ -813,7 +861,7 @@ def get(self, ident): return None return instance - return loading.load_on_ident(self, key) + return fallback_fn(self, key) @_generative() def correlate(self, *args): @@ -837,9 +885,9 @@ def correlate(self, *args): """ self._correlate = self._correlate.union( - _interpret_as_from(s) - if s is not None else None - for s in args) + _interpret_as_from(s) + if s is not None else None + for s in args) @_generative() def autoflush(self, setting): @@ -894,21 +942,23 @@ def with_parent(self, instance, property=None): """ if property is None: + mapper_zero = inspect(self._mapper_zero()).mapper + mapper = object_mapper(instance) for prop in mapper.iterate_properties: if isinstance(prop, properties.RelationshipProperty) and \ - prop.mapper is self._mapper_zero(): + prop.mapper is mapper_zero: property = prop break else: raise sa_exc.InvalidRequestError( - "Could not locate a property which relates instances " - "of class '%s' to instances of class '%s'" % - ( - self._mapper_zero().class_.__name__, - instance.__class__.__name__) - ) + "Could not locate a property which relates instances " + "of class '%s' to instances of class '%s'" % + ( + self._mapper_zero().class_.__name__, + instance.__class__.__name__) + ) return self.filter(with_parent(instance, property)) @@ -936,20 +986,182 @@ def from_self(self, *entities): """return a Query that selects from this Query's SELECT statement. - \*entities - optional list of entities which will replace - those being selected. + :meth:`.Query.from_self` essentially turns the SELECT statement + into a SELECT of itself. Given a query such as:: + + q = session.query(User).filter(User.name.like('e%')) + + Given the :meth:`.Query.from_self` version:: + + q = session.query(User).filter(User.name.like('e%')).from_self() + + This query renders as: + + .. sourcecode:: sql + + SELECT anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1) AS anon_1 + + There are lots of cases where :meth:`.Query.from_self` may be useful. + A simple one is where above, we may want to apply a row LIMIT to + the set of user objects we query against, and then apply additional + joins against that row-limited set:: + + q = session.query(User).filter(User.name.like('e%')).\\ + limit(5).from_self().\\ + join(User.addresses).filter(Address.email.like('q%')) + + The above query joins to the ``Address`` entity but only against the + first five results of the ``User`` query: + + .. 
sourcecode:: sql + + SELECT anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1 + LIMIT :param_1) AS anon_1 + JOIN address ON anon_1.user_id = address.user_id + WHERE address.email LIKE :email_1 + + **Automatic Aliasing** + + Another key behavior of :meth:`.Query.from_self` is that it applies + **automatic aliasing** to the entities inside the subquery, when + they are referenced on the outside. Above, if we continue to + refer to the ``User`` entity without any additional aliasing applied + to it, those references wil be in terms of the subquery:: + + q = session.query(User).filter(User.name.like('e%')).\\ + limit(5).from_self().\\ + join(User.addresses).filter(Address.email.like('q%')).\\ + order_by(User.name) + + The ORDER BY against ``User.name`` is aliased to be in terms of the + inner subquery: + + .. sourcecode:: sql + + SELECT anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1 + LIMIT :param_1) AS anon_1 + JOIN address ON anon_1.user_id = address.user_id + WHERE address.email LIKE :email_1 ORDER BY anon_1.user_name + + The automatic aliasing feature only works in a **limited** way, + for simple filters and orderings. More ambitious constructions + such as referring to the entity in joins should prefer to use + explicit subquery objects, typically making use of the + :meth:`.Query.subquery` method to produce an explicit subquery object. + Always test the structure of queries by viewing the SQL to ensure + a particular structure does what's expected! + + **Changing the Entities** + + :meth:`.Query.from_self` also includes the ability to modify what + columns are being queried. In our example, we want ``User.id`` + to be queried by the inner query, so that we can join to the + ``Address`` entity on the outside, but we only wanted the outer + query to return the ``Address.email`` column:: + + q = session.query(User).filter(User.name.like('e%')).\\ + limit(5).from_self(Address.email).\\ + join(User.addresses).filter(Address.email.like('q%')) + + yielding: + + .. sourcecode:: sql + + SELECT address.email AS address_email + FROM (SELECT "user".id AS user_id, "user".name AS user_name + FROM "user" + WHERE "user".name LIKE :name_1 + LIMIT :param_1) AS anon_1 + JOIN address ON anon_1.user_id = address.user_id + WHERE address.email LIKE :email_1 + + **Looking out for Inner / Outer Columns** + + Keep in mind that when referring to columns that originate from + inside the subquery, we need to ensure they are present in the + columns clause of the subquery itself; this is an ordinary aspect of + SQL. For example, if we wanted to load from a joined entity inside + the subquery using :func:`.contains_eager`, we need to add those + columns. Below illustrates a join of ``Address`` to ``User``, + then a subquery, and then we'd like :func:`.contains_eager` to access + the ``User`` columns:: + + q = session.query(Address).join(Address.user).\\ + filter(User.name.like('e%')) + + q = q.add_entity(User).from_self().\\ + options(contains_eager(Address.user)) + + We use :meth:`.Query.add_entity` above **before** we call + :meth:`.Query.from_self` so that the ``User`` columns are present + in the inner subquery, so that they are available to the + :func:`.contains_eager` modifier we are using on the outside, + producing: + + .. 
sourcecode:: sql + + SELECT anon_1.address_id AS anon_1_address_id, + anon_1.address_email AS anon_1_address_email, + anon_1.address_user_id AS anon_1_address_user_id, + anon_1.user_id AS anon_1_user_id, + anon_1.user_name AS anon_1_user_name + FROM ( + SELECT address.id AS address_id, + address.email AS address_email, + address.user_id AS address_user_id, + "user".id AS user_id, + "user".name AS user_name + FROM address JOIN "user" ON "user".id = address.user_id + WHERE "user".name LIKE :name_1) AS anon_1 + + If we didn't call ``add_entity(User)``, but still asked + :func:`.contains_eager` to load the ``User`` entity, it would be + forced to add the table on the outside without the correct + join criteria - note the ``anon1, "user"`` phrase at + the end: + + .. sourcecode:: sql + + -- incorrect query + SELECT anon_1.address_id AS anon_1_address_id, + anon_1.address_email AS anon_1_address_email, + anon_1.address_user_id AS anon_1_address_user_id, + "user".id AS user_id, + "user".name AS user_name + FROM ( + SELECT address.id AS address_id, + address.email AS address_email, + address.user_id AS address_user_id + FROM address JOIN "user" ON "user".id = address.user_id + WHERE "user".name LIKE :name_1) AS anon_1, "user" + + :param \*entities: optional list of entities which will replace + those being selected. """ fromclause = self.with_labels().enable_eagerloads(False).\ - _enable_single_crit(False).\ - statement.correlate(None) + statement.correlate(None) q = self._from_selectable(fromclause) + q._enable_single_crit = False + q._select_from_entity = self._mapper_zero() if entities: q._set_entities(entities) return q @_generative() - def _enable_single_crit(self, val): + def _set_enable_single_crit(self, val): self._enable_single_crit = val @_generative() @@ -960,7 +1172,7 @@ def _from_selectable(self, fromclause): '_limit', '_offset', '_joinpath', '_joinpoint', '_distinct', '_having', - '_prefixes', + '_prefixes', '_suffixes' ): self.__dict__.pop(attr, None) self._set_select_from([fromclause], True) @@ -1037,8 +1249,9 @@ def add_columns(self, *column): self._set_entity_selectables(self._entities[l:]) @util.pending_deprecation("0.7", - ":meth:`.add_column` is superseded by :meth:`.add_columns`", - False) + ":meth:`.add_column` is superseded " + "by :meth:`.add_columns`", + False) def add_column(self, column): """Add a column expression to the list of result columns to be returned. @@ -1055,7 +1268,7 @@ def options(self, *args): Most supplied options regard changing how column- and relationship-mapped attributes are loaded. See the sections - :ref:`deferred` and :doc:`/orm/loading` for reference + :ref:`deferred` and :doc:`/orm/loading_relationships` for reference documentation. """ @@ -1101,7 +1314,8 @@ def transform(q): @_generative() def with_hint(self, selectable, text, dialect_name='*'): - """Add an indexing hint for the given entity or selectable to + """Add an indexing or other executional context + hint for the given entity or selectable to this :class:`.Query`. Functionality is passed straight through to @@ -1109,11 +1323,35 @@ def with_hint(self, selectable, text, dialect_name='*'): with the addition that ``selectable`` can be a :class:`.Table`, :class:`.Alias`, or ORM entity / mapped class /etc. + + .. 
seealso:: + + :meth:`.Query.with_statement_hint` + """ - selectable = inspect(selectable).selectable + if selectable is not None: + selectable = inspect(selectable).selectable self._with_hints += ((selectable, text, dialect_name),) + def with_statement_hint(self, text, dialect_name='*'): + """add a statement hint to this :class:`.Select`. + + This method is similar to :meth:`.Select.with_hint` except that + it does not require an individual table, and instead applies to the + statement as a whole. + + This feature calls down into :meth:`.Select.with_statement_hint`. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.Query.with_hint` + + """ + return self.with_hint(None, text, dialect_name) + @_generative() def execution_options(self, **kwargs): """ Set non-SQL options which take effect during execution. @@ -1203,8 +1441,8 @@ def params(self, *args, **kwargs): kwargs.update(args[0]) elif len(args) > 0: raise sa_exc.ArgumentError( - "params() takes zero or one positional argument, " - "which is a dictionary.") + "params() takes zero or one positional argument, " + "which is a dictionary.") self._params = self._params.copy() self._params.update(kwargs) @@ -1217,7 +1455,9 @@ def filter(self, *criterion): session.query(MyClass).filter(MyClass.name == 'some name') - Multiple criteria are joined together by AND:: + Multiple criteria may be specified as comma separated; the effect + is that they will be joined together using the :func:`.and_` + function:: session.query(MyClass).\\ filter(MyClass.name == 'some name', MyClass.id > 5) @@ -1226,16 +1466,13 @@ def filter(self, *criterion): WHERE clause of a select. String expressions are coerced into SQL expression constructs via the :func:`.text` construct. - .. versionchanged:: 0.7.5 - Multiple criteria joined by AND. - .. seealso:: :meth:`.Query.filter_by` - filter on keyword expressions. """ for criterion in list(criterion): - criterion = expression._literal_as_text(criterion) + criterion = expression._expression_literal_as_text(criterion) criterion = self._adapt_clause(criterion, True, True) @@ -1252,7 +1489,9 @@ def filter_by(self, **kwargs): session.query(MyClass).filter_by(name = 'some name') - Multiple criteria are joined together by AND:: + Multiple criteria may be specified as comma separated; the effect + is that they will be joined together using the :func:`.and_` + function:: session.query(MyClass).\\ filter_by(name = 'some name', id = 5) @@ -1268,7 +1507,7 @@ def filter_by(self, **kwargs): """ clauses = [_entity_descriptor(self._joinpoint_zero(), key) == value - for key, value in kwargs.items()] + for key, value in kwargs.items()] return self.filter(sql.and_(*clauses)) @_generative(_no_statement_condition, _no_limit_offset) @@ -1321,7 +1560,8 @@ def having(self, criterion): """apply a HAVING criterion to the query and return the newly resulting :class:`.Query`. - :meth:`~.Query.having` is used in conjunction with :meth:`~.Query.group_by`. + :meth:`~.Query.having` is used in conjunction with + :meth:`~.Query.group_by`. 
HAVING criterion makes it possible to use filters on aggregate functions like COUNT, SUM, AVG, MAX, and MIN, eg.:: @@ -1333,14 +1573,13 @@ def having(self, criterion): """ - if isinstance(criterion, util.string_types): - criterion = sql.text(criterion) + criterion = expression._expression_literal_as_text(criterion) if criterion is not None and \ not isinstance(criterion, sql.ClauseElement): raise sa_exc.ArgumentError( - "having() argument must be of type " - "sqlalchemy.sql.ClauseElement or string") + "having() argument must be of type " + "sqlalchemy.sql.ClauseElement or string") criterion = self._adapt_clause(criterion, True, True) @@ -1388,7 +1627,7 @@ def union(self, *q): """ return self._from_selectable( - expression.union(*([self] + list(q)))) + expression.union(*([self] + list(q)))) def union_all(self, *q): """Produce a UNION ALL of this Query against one or more queries. @@ -1398,8 +1637,8 @@ def union_all(self, *q): """ return self._from_selectable( - expression.union_all(*([self] + list(q))) - ) + expression.union_all(*([self] + list(q))) + ) def intersect(self, *q): """Produce an INTERSECT of this Query against one or more queries. @@ -1409,8 +1648,8 @@ def intersect(self, *q): """ return self._from_selectable( - expression.intersect(*([self] + list(q))) - ) + expression.intersect(*([self] + list(q))) + ) def intersect_all(self, *q): """Produce an INTERSECT ALL of this Query against one or more queries. @@ -1420,8 +1659,8 @@ def intersect_all(self, *q): """ return self._from_selectable( - expression.intersect_all(*([self] + list(q))) - ) + expression.intersect_all(*([self] + list(q))) + ) def except_(self, *q): """Produce an EXCEPT of this Query against one or more queries. @@ -1431,8 +1670,8 @@ def except_(self, *q): """ return self._from_selectable( - expression.except_(*([self] + list(q))) - ) + expression.except_(*([self] + list(q))) + ) def except_all(self, *q): """Produce an EXCEPT ALL of this Query against one or more queries. @@ -1442,8 +1681,8 @@ def except_all(self, *q): """ return self._from_selectable( - expression.except_all(*([self] + list(q))) - ) + expression.except_all(*([self] + list(q))) + ) def join(self, *props, **kwargs): """Create a SQL JOIN against this :class:`.Query` object's criterion @@ -1453,8 +1692,8 @@ def join(self, *props, **kwargs): Consider a mapping between two classes ``User`` and ``Address``, with a relationship ``User.addresses`` representing a collection - of ``Address`` objects associated with each ``User``. The most common - usage of :meth:`~.Query.join` is to create a JOIN along this + of ``Address`` objects associated with each ``User``. The most + common usage of :meth:`~.Query.join` is to create a JOIN along this relationship, using the ``User.addresses`` attribute as an indicator for how this should occur:: @@ -1671,6 +1910,14 @@ def join(self, *props, **kwargs): anonymously aliased. Subsequent calls to :meth:`~.Query.filter` and similar will adapt the incoming criterion to the target alias, until :meth:`~.Query.reset_joinpoint` is called. + :param isouter=False: If True, the join used will be a left outer join, + just as if the :meth:`.Query.outerjoin` method were called. This + flag is here to maintain consistency with the same flag as accepted + by :meth:`.FromClause.join` and other Core constructs. + + + .. 
versionadded:: 1.0.0 + :param from_joinpoint=False: When using ``aliased=True``, a setting of True here will cause the join to be from the most recent joined target, rather than starting back from the original @@ -1680,22 +1927,23 @@ def join(self, *props, **kwargs): :ref:`ormtutorial_joins` in the ORM tutorial. - :ref:`inheritance_toplevel` for details on how :meth:`~.Query.join` - is used for inheritance relationships. + :ref:`inheritance_toplevel` for details on how + :meth:`~.Query.join` is used for inheritance relationships. :func:`.orm.join` - a standalone ORM-level join function, used internally by :meth:`.Query.join`, which in previous SQLAlchemy versions was the primary ORM-level joining interface. """ - aliased, from_joinpoint = kwargs.pop('aliased', False),\ - kwargs.pop('from_joinpoint', False) + aliased, from_joinpoint, isouter = kwargs.pop('aliased', False),\ + kwargs.pop('from_joinpoint', False),\ + kwargs.pop('isouter', False) if kwargs: raise TypeError("unknown arguments: %s" % - ','.join(kwargs.keys)) + ', '.join(sorted(kwargs))) return self._join(props, - outerjoin=False, create_aliases=aliased, - from_joinpoint=from_joinpoint) + outerjoin=isouter, create_aliases=aliased, + from_joinpoint=from_joinpoint) def outerjoin(self, *props, **kwargs): """Create a left outer join against this ``Query`` object's criterion @@ -1705,13 +1953,13 @@ def outerjoin(self, *props, **kwargs): """ aliased, from_joinpoint = kwargs.pop('aliased', False), \ - kwargs.pop('from_joinpoint', False) + kwargs.pop('from_joinpoint', False) if kwargs: raise TypeError("unknown arguments: %s" % - ','.join(kwargs)) + ', '.join(sorted(kwargs))) return self._join(props, - outerjoin=True, create_aliases=aliased, - from_joinpoint=from_joinpoint) + outerjoin=True, create_aliases=aliased, + from_joinpoint=from_joinpoint) def _update_joinpoint(self, jp): self._joinpoint = jp @@ -1737,14 +1985,15 @@ def _join(self, keys, outerjoin, create_aliases, from_joinpoint): if len(keys) == 2 and \ isinstance(keys[0], (expression.FromClause, - type, AliasedClass)) and \ + type, AliasedClass)) and \ isinstance(keys[1], (str, expression.ClauseElement, - interfaces.PropComparator)): + interfaces.PropComparator)): # detect 2-arg form of join and # convert to a tuple. keys = (keys,) - for arg1 in util.to_list(keys): + keylist = util.to_list(keys) + for idx, arg1 in enumerate(keylist): if isinstance(arg1, tuple): # "tuple" form of join, multiple # tuples are accepted as well. The simpler @@ -1758,13 +2007,19 @@ def _join(self, keys, outerjoin, create_aliases, from_joinpoint): # is a little bit of legacy behavior still at work here # which means they might be in either order. may possibly # lock this down to (right_entity, onclause) in 0.6. 
- if isinstance(arg1, (interfaces.PropComparator, util.string_types)): + if isinstance( + arg1, (interfaces.PropComparator, util.string_types)): right_entity, onclause = arg2, arg1 else: right_entity, onclause = arg1, arg2 left_entity = prop = None + if isinstance(onclause, interfaces.PropComparator): + of_type = getattr(onclause, '_of_type', None) + else: + of_type = None + if isinstance(onclause, util.string_types): left_entity = self._joinpoint_zero() @@ -1774,7 +2029,7 @@ def _join(self, keys, outerjoin, create_aliases, from_joinpoint): # check for q.join(Class.propname, from_joinpoint=True) # and Class is that of the current joinpoint elif from_joinpoint and \ - isinstance(onclause, interfaces.PropComparator): + isinstance(onclause, interfaces.PropComparator): left_entity = onclause._parententity info = inspect(self._joinpoint_zero()) @@ -1786,13 +2041,11 @@ def _join(self, keys, outerjoin, create_aliases, from_joinpoint): if left_mapper is left_entity: left_entity = self._joinpoint_zero() descriptor = _entity_descriptor(left_entity, - onclause.key) + onclause.key) onclause = descriptor if isinstance(onclause, interfaces.PropComparator): if right_entity is None: - right_entity = onclause.property.mapper - of_type = getattr(onclause, '_of_type', None) if of_type: right_entity = of_type else: @@ -1801,7 +2054,7 @@ def _join(self, keys, outerjoin, create_aliases, from_joinpoint): left_entity = onclause._parententity prop = onclause.property - if not isinstance(onclause, attributes.QueryableAttribute): + if not isinstance(onclause, attributes.QueryableAttribute): onclause = prop if not create_aliases: @@ -1819,6 +2072,11 @@ def _join(self, keys, outerjoin, create_aliases, from_joinpoint): jp = self._joinpoint[edge].copy() jp['prev'] = (edge, self._joinpoint) self._update_joinpoint(jp) + + if idx == len(keylist) - 1: + util.warn( + "Pathed join target %s has already " + "been joined to; skipping" % prop) continue elif onclause is not None and right_entity is None: @@ -1826,10 +2084,9 @@ def _join(self, keys, outerjoin, create_aliases, from_joinpoint): raise NotImplementedError("query.join(a==b) not supported.") self._join_left_to_right( - left_entity, - right_entity, onclause, - outerjoin, create_aliases, prop) - + left_entity, + right_entity, onclause, + outerjoin, create_aliases, prop) def _join_left_to_right(self, left, right, onclause, outerjoin, create_aliases, prop): @@ -1845,48 +2102,47 @@ def _join_left_to_right(self, left, right, if left is None: raise sa_exc.InvalidRequestError( - "Don't know how to join from %s; please use " - "select_from() to establish the left " - "entity/selectable of this join" % self._entities[0]) + "Don't know how to join from %s; please use " + "select_from() to establish the left " + "entity/selectable of this join" % self._entities[0]) if left is right and \ not create_aliases: raise sa_exc.InvalidRequestError( - "Can't construct a join from %s to %s, they " - "are the same entity" % - (left, right)) + "Can't construct a join from %s to %s, they " + "are the same entity" % + (left, right)) l_info = inspect(left) r_info = inspect(right) - overlap = False if not create_aliases: right_mapper = getattr(r_info, "mapper", None) # if the target is a joined inheritance mapping, # be more liberal about auto-aliasing. 
if right_mapper and ( - right_mapper.with_polymorphic or - isinstance(right_mapper.mapped_table, expression.Join) - ): + right_mapper.with_polymorphic or + isinstance(right_mapper.mapped_table, expression.Join) + ): for from_obj in self._from_obj or [l_info.selectable]: - if sql_util.selectables_overlap(l_info.selectable, from_obj) and \ - sql_util.selectables_overlap(from_obj, r_info.selectable): + if sql_util.selectables_overlap( + l_info.selectable, from_obj) and \ + sql_util.selectables_overlap( + from_obj, r_info.selectable): overlap = True break - elif sql_util.selectables_overlap(l_info.selectable, r_info.selectable): - overlap = True - - if overlap and l_info.selectable is r_info.selectable: + if (overlap or not create_aliases) and \ + l_info.selectable is r_info.selectable: raise sa_exc.InvalidRequestError( - "Can't join table/selectable '%s' to itself" % - l_info.selectable) + "Can't join table/selectable '%s' to itself" % + l_info.selectable) right, onclause = self._prepare_right_side( - r_info, right, onclause, - create_aliases, - prop, overlap) + r_info, right, onclause, + create_aliases, + prop, overlap) # if joining on a MapperProperty path, # track the path to prevent redundant joins @@ -1901,7 +2157,7 @@ def _join_left_to_right(self, left, right, self._join_to_left(l_info, left, right, onclause, outerjoin) def _prepare_right_side(self, r_info, right, onclause, create_aliases, - prop, overlap): + prop, overlap): info = r_info right_mapper, right_selectable, right_is_aliased = \ @@ -1915,8 +2171,8 @@ def _prepare_right_side(self, r_info, right, onclause, create_aliases, if right_mapper and prop and \ not right_mapper.common_parent(prop.mapper): raise sa_exc.InvalidRequestError( - "Join target %s does not correspond to " - "the right side of join condition %s" % (right, onclause) + "Join target %s does not correspond to " + "the right side of join condition %s" % (right, onclause) ) if not right_mapper and prop: @@ -1926,11 +2182,11 @@ def _prepare_right_side(self, r_info, right, onclause, create_aliases, if right_mapper and right is right_selectable: if not right_selectable.is_derived_from( - right_mapper.mapped_table): + right_mapper.mapped_table): raise sa_exc.InvalidRequestError( "Selectable '%s' is not derived from '%s'" % (right_selectable.description, - right_mapper.mapped_table.description)) + right_mapper.mapped_table.description)) if isinstance(right_selectable, expression.SelectBase): # TODO: this isn't even covered now! @@ -1940,16 +2196,16 @@ def _prepare_right_side(self, r_info, right, onclause, create_aliases, right = aliased(right_mapper, right_selectable) aliased_entity = right_mapper and \ - not right_is_aliased and \ - ( - right_mapper.with_polymorphic and isinstance( - right_mapper._with_polymorphic_selectable, - expression.Alias) - or - overlap # test for overlap: - # orm/inheritance/relationships.py - # SelfReferentialM2MTest - ) + not right_is_aliased and \ + ( + right_mapper.with_polymorphic and isinstance( + right_mapper._with_polymorphic_selectable, + expression.Alias) + or + overlap # test for overlap: + # orm/inheritance/relationships.py + # SelfReferentialM2MTest + ) if not need_adapter and (create_aliases or aliased_entity): right = aliased(right, flat=True) @@ -1959,10 +2215,11 @@ def _prepare_right_side(self, r_info, right, onclause, create_aliases, # apply an adapter to all subsequent filter() calls # until reset_joinpoint() is called. 
if need_adapter: - self._filter_aliases = ORMAdapter(right, - equivalents=right_mapper and - right_mapper._equivalent_columns or {}, - chain_to=self._filter_aliases) + self._filter_aliases = ORMAdapter( + right, + equivalents=right_mapper and + right_mapper._equivalent_columns or {}, + chain_to=self._filter_aliases) # if the onclause is a ClauseElement, adapt it with any # adapters that are in place right now @@ -1975,12 +2232,12 @@ def _prepare_right_side(self, r_info, right, onclause, create_aliases, # set are also adapted. if aliased_entity and not create_aliases: self._mapper_loads_polymorphically_with( - right_mapper, - ORMAdapter( - right, - equivalents=right_mapper._equivalent_columns - ) - ) + right_mapper, + ORMAdapter( + right, + equivalents=right_mapper._equivalent_columns + ) + ) return right, onclause @@ -1991,22 +2248,22 @@ def _join_to_left(self, l_info, left, right, onclause, outerjoin): if self._from_obj: replace_clause_index, clause = sql_util.find_join_source( - self._from_obj, - left_selectable) + self._from_obj, + left_selectable) if clause is not None: try: clause = orm_join(clause, - right, - onclause, isouter=outerjoin) + right, + onclause, isouter=outerjoin) except sa_exc.ArgumentError as ae: raise sa_exc.InvalidRequestError( - "Could not find a FROM clause to join from. " - "Tried joining to %s, but got: %s" % (right, ae)) + "Could not find a FROM clause to join from. " + "Tried joining to %s, but got: %s" % (right, ae)) self._from_obj = \ - self._from_obj[:replace_clause_index] + \ - (clause, ) + \ - self._from_obj[replace_clause_index + 1:] + self._from_obj[:replace_clause_index] + \ + (clause, ) + \ + self._from_obj[replace_clause_index + 1:] return if left_mapper: @@ -2024,8 +2281,8 @@ def _join_to_left(self, l_info, left, right, onclause, outerjoin): clause = orm_join(clause, right, onclause, isouter=outerjoin) except sa_exc.ArgumentError as ae: raise sa_exc.InvalidRequestError( - "Could not find a FROM clause to join from. " - "Tried joining to %s, but got: %s" % (right, ae)) + "Could not find a FROM clause to join from. " + "Tried joining to %s, but got: %s" % (right, ae)) self._from_obj = self._from_obj + (clause,) def _reset_joinpoint(self): @@ -2183,14 +2440,14 @@ def __getitem__(self, item): start, stop, step = util.decode_slice(item) if isinstance(stop, int) and \ - isinstance(start, int) and \ - stop - start <= 0: + isinstance(start, int) and \ + stop - start <= 0: return [] # perhaps we should execute a count() here so that we # can still use LIMIT/OFFSET ? elif (isinstance(start, int) and start < 0) \ - or (isinstance(stop, int) and stop < 0): + or (isinstance(stop, int) and stop < 0): return list(self)[item] res = self.slice(start, stop) @@ -2206,9 +2463,35 @@ def __getitem__(self, item): @_generative(_no_statement_condition) def slice(self, start, stop): - """apply LIMIT/OFFSET to the ``Query`` based on a " - "range and return the newly resulting ``Query``.""" + """Computes the "slice" of the :class:`.Query` represented by + the given indices and returns the resulting :class:`.Query`. + + The start and stop indices behave like the argument to Python's + built-in :func:`range` function. This method provides an + alternative to using ``LIMIT``/``OFFSET`` to get a slice of the + query. + + For example, :: + + session.query(User).order_by(User.id).slice(1, 3) + renders as + + .. sourcecode:: sql + + SELECT users.id AS users_id, + users.name AS users_name + FROM users ORDER BY users.id + LIMIT ? OFFSET ? + (2, 1) + + .. 
seealso:: + + :meth:`.Query.limit` + + :meth:`.Query.offset` + + """ if start is not None and stop is not None: self._offset = (self._offset or 0) + start self._limit = stop - start @@ -2223,7 +2506,6 @@ def slice(self, start, stop): @_generative(_no_statement_condition) def limit(self, limit): """Apply a ``LIMIT`` to the query and return the newly resulting - ``Query``. """ @@ -2242,6 +2524,19 @@ def distinct(self, *criterion): """Apply a ``DISTINCT`` to the query and return the newly resulting ``Query``. + + .. note:: + + The :meth:`.distinct` call includes logic that will automatically + add columns from the ORDER BY of the query to the columns + clause of the SELECT statement, to satisfy the common need + of the database backend that ORDER BY columns be part of the + SELECT list when DISTINCT is used. These columns *are not* + added to the list of columns actually fetched by the + :class:`.Query`, however, so would not affect results. + The columns are passed through when using the + :attr:`.Query.statement` accessor, however. + :param \*expr: optional column expressions. When present, the Postgresql dialect will render a ``DISTINCT ON (>)`` construct. @@ -2277,12 +2572,38 @@ def prefix_with(self, *prefixes): .. versionadded:: 0.7.7 + .. seealso:: + + :meth:`.HasPrefixes.prefix_with` + """ if self._prefixes: self._prefixes += prefixes else: self._prefixes = prefixes + @_generative() + def suffix_with(self, *suffixes): + """Apply the suffix to the query and return the newly resulting + ``Query``. + + :param \*suffixes: optional suffixes, typically strings, + not using any commas. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.Query.prefix_with` + + :meth:`.HasSuffixes.suffix_with` + + """ + if self._suffixes: + self._suffixes += suffixes + else: + self._suffixes = suffixes + def all(self): """Return the results represented by this ``Query`` as a list. @@ -2298,20 +2619,25 @@ def from_statement(self, statement): This method bypasses all internal statement compilation, and the statement is executed without modification. - The statement argument is either a string, a ``select()`` construct, - or a ``text()`` construct, and should return the set of columns - appropriate to the entity class represented by this ``Query``. + The statement is typically either a :func:`~.expression.text` + or :func:`~.expression.select` construct, and should return the set + of columns + appropriate to the entity class represented by this :class:`.Query`. + + .. seealso:: + + :ref:`orm_tutorial_literal_sql` - usage examples in the + ORM tutorial """ - if isinstance(statement, util.string_types): - statement = sql.text(statement) + statement = expression._expression_literal_as_text(statement) if not isinstance(statement, - (expression.TextClause, - expression.SelectBase)): + (expression.TextClause, + expression.SelectBase)): raise sa_exc.ArgumentError( - "from_statement accepts text(), select(), " - "and union() objects only.") + "from_statement accepts text(), select(), " + "and union() objects only.") self._statement = statement @@ -2324,7 +2650,7 @@ def first(self): (note this may consist of multiple result rows if join-loaded collections are present). - Calling ``first()`` results in an execution of the underlying query. + Calling :meth:`.Query.first` results in an execution of the underlying query. """ if self._statement is not None: @@ -2336,26 +2662,57 @@ def first(self): else: return None + def one_or_none(self): + """Return at most one result or raise an exception. 
+ + Returns ``None`` if the query selects + no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` + if multiple object identities are returned, or if multiple + rows are returned for a query that returns only scalar values + as opposed to full identity-mapped entities. + + Calling :meth:`.Query.one_or_none` results in an execution of the underlying + query. + + .. versionadded:: 1.0.9 + + Added :meth:`.Query.one_or_none` + + .. seealso:: + + :meth:`.Query.first` + + :meth:`.Query.one` + + + """ + ret = list(self) + + l = len(ret) + if l == 1: + return ret[0] + elif l == 0: + return None + else: + raise orm_exc.MultipleResultsFound( + "Multiple rows were found for one_or_none()") + def one(self): """Return exactly one result or raise an exception. Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` if multiple object identities are returned, or if multiple - rows are returned for a query that does not return object - identities. + rows are returned for a query that returns only scalar values + as opposed to full identity-mapped entities. - Note that an entity query, that is, one which selects one or - more mapped classes as opposed to individual column attributes, - may ultimately represent many rows but only one row of - unique entity or entities - this is a successful result for one(). + Calling :meth:`.one` results in an execution of the underlying query. + + .. seealso:: - Calling ``one()`` results in an execution of the underlying query. + :meth:`.Query.first` - .. versionchanged:: 0.6 - ``one()`` fully fetches all results instead of applying - any kind of limit, so that the "unique"-ing of entities does not - conceal multiple object identities. + :meth:`.Query.one_or_none` """ ret = list(self) @@ -2405,19 +2762,19 @@ def __iter__(self): def _connection_from_session(self, **kw): conn = self.session.connection( - **kw) + **kw) if self._execution_options: conn = conn.execution_options(**self._execution_options) return conn def _execute_and_instances(self, querycontext): conn = self._connection_from_session( - mapper=self._mapper_zero_or_none(), - clause=querycontext.statement, - close_with_result=True) + mapper=self._bind_mapper(), + clause=querycontext.statement, + close_with_result=True) result = conn.execute(querycontext.statement, self._params) - return loading.instances(self, result, querycontext) + return loading.instances(querycontext.query, result, querycontext) @property def column_descriptions(self): @@ -2439,30 +2796,46 @@ def column_descriptions(self): 'type':User, 'aliased':False, 'expr':User, + 'entity': User }, { 'name':'id', 'type':Integer(), 'aliased':False, 'expr':User.id, + 'entity': User }, { 'name':'user2', 'type':User, 'aliased':True, - 'expr':user_alias + 'expr':user_alias, + 'entity': user_alias } ] """ + return [ { 'name': ent._label_name, 'type': ent.type, - 'aliased': getattr(ent, 'is_aliased_class', False), - 'expr': ent.expr + 'aliased': getattr(insp_ent, 'is_aliased_class', False), + 'expr': ent.expr, + 'entity': + getattr(insp_ent, "entity", None) + if ent.entity_zero is not None + and not insp_ent.is_clause_element + else None } - for ent in self._entities + for ent, insp_ent in [ + ( + _ent, + (inspect(_ent.entity_zero) + if _ent.entity_zero is not None else None) + ) + for _ent in self._entities + ] ] def instances(self, cursor, __context=None): @@ -2514,6 +2887,7 @@ def _select_args(self): 'offset': self._offset, 'distinct': self._distinct, 'prefixes': self._prefixes, + 
'suffixes': self._suffixes, 'group_by': self._group_by or None, 'having': self._having } @@ -2540,6 +2914,19 @@ def exists(self): SELECT 1 FROM users WHERE users.name = :name_1 ) AS anon_1 + The EXISTS construct is usually used in the WHERE clause:: + + session.query(User.id).filter(q.exists()).scalar() + + Note that some databases such as SQL Server don't allow an + EXISTS expression to be present in the columns clause of a + SELECT. To select a simple boolean value based on the exists + as a WHERE, use :func:`.literal`:: + + from sqlalchemy import literal + + session.query(literal(True)).filter(q.exists()).scalar() + .. versionadded:: 0.8.1 """ @@ -2550,7 +2937,7 @@ def exists(self): # .with_only_columns() after we have a core select() so that # we get just "SELECT 1" without any entities. return sql.exists(self.add_columns('1').with_labels(). - statement.with_only_columns(['1'])) + statement.with_only_columns([1])) def count(self): """Return a count of rows this Query would return. @@ -2597,6 +2984,18 @@ def delete(self, synchronize_session='evaluate'): Deletes rows matched by this query from the database. + E.g.:: + + sess.query(User).filter(User.age == 25).\\ + delete(synchronize_session=False) + + sess.query(User).filter(User.age == 25).\\ + delete(synchronize_session='evaluate') + + .. warning:: The :meth:`.Query.delete` method is a "bulk" operation, + which bypasses ORM unit-of-work automation in favor of greater + performance. **Please read all caveats and warnings below.** + :param synchronize_session: chooses the strategy for the removal of matched objects from the session. Valid values are: @@ -2615,8 +3014,7 @@ def delete(self, synchronize_session='evaluate'): ``'evaluate'`` - Evaluate the query's criteria in Python straight on the objects in the session. If evaluation of the criteria isn't - implemented, an error is raised. In that case you probably - want to use the 'fetch' strategy as a fallback. + implemented, an error is raised. The expression evaluator currently doesn't account for differing string collations between the database and Python. @@ -2624,28 +3022,42 @@ def delete(self, synchronize_session='evaluate'): :return: the count of rows matched as returned by the database's "row count" feature. - This method has several key caveats: - - * The method does **not** offer in-Python cascading of relationships - it - is assumed that ON DELETE CASCADE/SET NULL/etc. is configured for any foreign key - references which require it, otherwise the database may emit an - integrity violation if foreign key references are being enforced. - - After the DELETE, dependent objects in the :class:`.Session` which - were impacted by an ON DELETE may not contain the current - state, or may have been deleted. This issue is resolved once the - :class:`.Session` is expired, - which normally occurs upon :meth:`.Session.commit` or can be forced - by using :meth:`.Session.expire_all`. Accessing an expired object - whose row has been deleted will invoke a SELECT to locate the - row; when the row is not found, an :class:`~sqlalchemy.orm.exc.ObjectDeletedError` - is raised. - - * The :meth:`.MapperEvents.before_delete` and - :meth:`.MapperEvents.after_delete` - events are **not** invoked from this method. Instead, the - :meth:`.SessionEvents.after_bulk_delete` method is provided to act - upon a mass DELETE of entity rows. + .. 
warning:: **Additional Caveats for bulk query deletes** + + * The method does **not** offer in-Python cascading of + relationships - it is assumed that ON DELETE CASCADE/SET + NULL/etc. is configured for any foreign key references + which require it, otherwise the database may emit an + integrity violation if foreign key references are being + enforced. + + After the DELETE, dependent objects in the + :class:`.Session` which were impacted by an ON DELETE + may not contain the current state, or may have been + deleted. This issue is resolved once the + :class:`.Session` is expired, which normally occurs upon + :meth:`.Session.commit` or can be forced by using + :meth:`.Session.expire_all`. Accessing an expired + object whose row has been deleted will invoke a SELECT + to locate the row; when the row is not found, an + :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is + raised. + + * The ``'fetch'`` strategy results in an additional + SELECT statement emitted and will significantly reduce + performance. + + * The ``'evaluate'`` strategy performs a scan of + all matching objects within the :class:`.Session`; if the + contents of the :class:`.Session` are expired, such as + via a proceeding :meth:`.Session.commit` call, **this will + result in SELECT queries emitted for every matching object**. + + * The :meth:`.MapperEvents.before_delete` and + :meth:`.MapperEvents.after_delete` + events **are not invoked** from this method. Instead, the + :meth:`.SessionEvents.after_bulk_delete` method is provided to + act upon a mass DELETE of entity rows. .. seealso:: @@ -2654,23 +3066,48 @@ def delete(self, synchronize_session='evaluate'): :ref:`inserts_and_updates` - Core SQL tutorial """ - #TODO: cascades need handling. + # TODO: cascades need handling. delete_op = persistence.BulkDelete.factory( - self, synchronize_session) + self, synchronize_session) delete_op.exec_() return delete_op.rowcount - def update(self, values, synchronize_session='evaluate'): + def update(self, values, synchronize_session='evaluate', update_args=None): """Perform a bulk update query. Updates rows matched by this query in the database. - :param values: a dictionary with attributes names as keys and literal - values or sql expressions as values. + E.g.:: + + sess.query(User).filter(User.age == 25).\\ + update({User.age: User.age - 10}, synchronize_session=False) + + sess.query(User).filter(User.age == 25).\\ + update({"age": User.age - 10}, synchronize_session='evaluate') + + + .. warning:: The :meth:`.Query.update` method is a "bulk" operation, + which bypasses ORM unit-of-work automation in favor of greater + performance. **Please read all caveats and warnings below.** + + + :param values: a dictionary with attributes names, or alternatively + mapped attributes or SQL expressions, as keys, and literal + values or sql expressions as values. If :ref:`parameter-ordered + mode ` is desired, the values can be + passed as a list of 2-tuples; + this requires that the :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` + flag is passed to the :paramref:`.Query.update.update_args` dictionary + as well. + + .. versionchanged:: 1.0.0 - string names in the values dictionary + are now resolved against the mapped entity; previously, these + strings were passed as literal column names with no mapper-level + translation. :param synchronize_session: chooses the strategy to update the - attributes on objects in the session. Valid values are: + attributes on objects in the session. 
Valid values are: ``False`` - don't synchronize the session. This option is the most efficient and is reliable once the session is expired, which @@ -2690,44 +3127,65 @@ def update(self, values, synchronize_session='evaluate'): The expression evaluator currently doesn't account for differing string collations between the database and Python. - :return: the count of rows matched as returned by the database's - "row count" feature. + :param update_args: Optional dictionary, if present will be passed + to the underlying :func:`.update` construct as the ``**kw`` for + the object. May be used to pass dialect-specific arguments such + as ``mysql_limit``, as well as other special arguments such as + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`. - This method has several key caveats: - - * The method does **not** offer in-Python cascading of relationships - it - is assumed that ON UPDATE CASCADE is configured for any foreign key - references which require it, otherwise the database may emit an - integrity violation if foreign key references are being enforced. - - After the UPDATE, dependent objects in the :class:`.Session` which - were impacted by an ON UPDATE CASCADE may not contain the current - state; this issue is resolved once the :class:`.Session` is expired, - which normally occurs upon :meth:`.Session.commit` or can be forced - by using :meth:`.Session.expire_all`. - - * As of 0.8, this method will support multiple table updates, as detailed - in :ref:`multi_table_updates`, and this behavior does extend to support - updates of joined-inheritance and other multiple table mappings. However, - the **join condition of an inheritance mapper is currently not - automatically rendered**. - Care must be taken in any multiple-table update to explicitly include - the joining condition between those tables, even in mappings where - this is normally automatic. - E.g. if a class ``Engineer`` subclasses ``Employee``, an UPDATE of the - ``Engineer`` local table using criteria against the ``Employee`` - local table might look like:: - - session.query(Engineer).\\ - filter(Engineer.id == Employee.id).\\ - filter(Employee.name == 'dilbert').\\ - update({"engineer_type": "programmer"}) - - * The :meth:`.MapperEvents.before_update` and - :meth:`.MapperEvents.after_update` - events are **not** invoked from this method. Instead, the - :meth:`.SessionEvents.after_bulk_update` method is provided to act - upon a mass UPDATE of entity rows. + .. versionadded:: 1.0.0 + + :return: the count of rows matched as returned by the database's + "row count" feature. + + .. warning:: **Additional Caveats for bulk query updates** + + * The method does **not** offer in-Python cascading of + relationships - it is assumed that ON UPDATE CASCADE is + configured for any foreign key references which require + it, otherwise the database may emit an integrity + violation if foreign key references are being enforced. + + After the UPDATE, dependent objects in the + :class:`.Session` which were impacted by an ON UPDATE + CASCADE may not contain the current state; this issue is + resolved once the :class:`.Session` is expired, which + normally occurs upon :meth:`.Session.commit` or can be + forced by using :meth:`.Session.expire_all`. + + * The ``'fetch'`` strategy results in an additional + SELECT statement emitted and will significantly reduce + performance. 
+ + * The ``'evaluate'`` strategy performs a scan of + all matching objects within the :class:`.Session`; if the + contents of the :class:`.Session` are expired, such as + via a proceeding :meth:`.Session.commit` call, **this will + result in SELECT queries emitted for every matching object**. + + * The method supports multiple table updates, as detailed + in :ref:`multi_table_updates`, and this behavior does + extend to support updates of joined-inheritance and + other multiple table mappings. However, the **join + condition of an inheritance mapper is not + automatically rendered**. Care must be taken in any + multiple-table update to explicitly include the joining + condition between those tables, even in mappings where + this is normally automatic. E.g. if a class ``Engineer`` + subclasses ``Employee``, an UPDATE of the ``Engineer`` + local table using criteria against the ``Employee`` + local table might look like:: + + session.query(Engineer).\\ + filter(Engineer.id == Employee.id).\\ + filter(Employee.name == 'dilbert').\\ + update({"engineer_type": "programmer"}) + + * The :meth:`.MapperEvents.before_update` and + :meth:`.MapperEvents.after_update` + events **are not invoked from this method**. Instead, the + :meth:`.SessionEvents.after_bulk_update` method is provided to + act upon a mass UPDATE of entity rows. .. seealso:: @@ -2737,19 +3195,19 @@ def update(self, values, synchronize_session='evaluate'): """ - #TODO: value keys need to be mapped to corresponding sql cols and - # instr.attr.s to string keys - #TODO: updates of manytoone relationships need to be converted to - # fk assignments - #TODO: cascades need handling. - + update_args = update_args or {} update_op = persistence.BulkUpdate.factory( - self, synchronize_session, values) + self, synchronize_session, values, update_args) update_op.exec_() return update_op.rowcount - def _compile_context(self, labels=True): + if self.dispatch.before_compile: + for fn in self.dispatch.before_compile: + new_query = fn(self) + if new_query is not None: + self = new_query + context = QueryContext(self) if context.statement is not None: @@ -2770,10 +3228,8 @@ def _compile_context(self, labels=True): # "load from explicit FROMs" mode, # i.e. when select_from() or join() is used context.froms = list(context.from_clause) - else: - # "load from discrete FROMs" mode, - # i.e. when each _MappedEntity has its own FROM - context.froms = context.froms + # else "load from discrete FROMs" mode, + # i.e. when each _MappedEntity has its own FROM if self._enable_single_crit: self._adjust_for_single_inheritance(context) @@ -2781,18 +3237,19 @@ def _compile_context(self, labels=True): if not context.primary_columns: if self._only_load_props: raise sa_exc.InvalidRequestError( - "No column-based properties specified for " - "refresh operation. Use session.expire() " - "to reload collections and related items.") + "No column-based properties specified for " + "refresh operation. 
Use session.expire() " + "to reload collections and related items.") else: raise sa_exc.InvalidRequestError( - "Query contains no columns with which to " - "SELECT from.") + "Query contains no columns with which to " + "SELECT from.") if context.multi_row_eager_loaders and self._should_nest_selectable: context.statement = self._compound_eager_statement(context) else: context.statement = self._simple_statement(context) + return context def _compound_eager_statement(self, context): @@ -2802,26 +3259,26 @@ def _compound_eager_statement(self, context): if context.order_by: order_by_col_expr = list( - chain(*[ - sql_util.unwrap_order_by(o) - for o in context.order_by - ]) - ) + chain(*[ + sql_util.unwrap_order_by(o) + for o in context.order_by + ]) + ) else: context.order_by = None order_by_col_expr = [] inner = sql.select( - context.primary_columns + order_by_col_expr, - context.whereclause, - from_obj=context.froms, - use_labels=context.labels, - # TODO: this order_by is only needed if - # LIMIT/OFFSET is present in self._select_args, - # else the application on the outside is enough - order_by=context.order_by, - **self._select_args - ) + context.primary_columns + order_by_col_expr, + context.whereclause, + from_obj=context.froms, + use_labels=context.labels, + # TODO: this order_by is only needed if + # LIMIT/OFFSET is present in self._select_args, + # else the application on the outside is enough + order_by=context.order_by, + **self._select_args + ) for hint in self._with_hints: inner = inner.with_hint(*hint) @@ -2836,8 +3293,8 @@ def _compound_eager_statement(self, context): context.adapter = sql_util.ColumnAdapter(inner, equivs) statement = sql.select( - [inner] + context.secondary_columns, - use_labels=context.labels) + [inner] + context.secondary_columns, + use_labels=context.labels) statement._for_update_arg = context._for_update_arg @@ -2847,8 +3304,8 @@ def _compound_eager_statement(self, context): # giving us a marker as to where the "splice point" of # the join should be from_clause = sql_util.splice_joins( - from_clause, - eager_join, eager_join.stop_on) + from_clause, + eager_join, eager_join.stop_on) statement.append_from(from_clause) @@ -2868,25 +3325,24 @@ def _simple_statement(self, context): if self._distinct and context.order_by: order_by_col_expr = list( - chain(*[ - sql_util.unwrap_order_by(o) - for o in context.order_by - ]) - ) + chain(*[ + sql_util.unwrap_order_by(o) + for o in context.order_by + ]) + ) context.primary_columns += order_by_col_expr context.froms += tuple(context.eager_joins.values()) statement = sql.select( - context.primary_columns + - context.secondary_columns, - context.whereclause, - from_obj=context.froms, - use_labels=context.labels, - order_by=context.order_by, - **self._select_args - ) - + context.primary_columns + + context.secondary_columns, + context.whereclause, + from_obj=context.froms, + use_labels=context.labels, + order_by=context.order_by, + **self._select_args + ) statement._for_update_arg = context._for_update_arg for hint in self._with_hints: @@ -2908,7 +3364,8 @@ def _adjust_for_single_inheritance(self, context): subtypes are selected from the total results. 
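Purely as an illustration of the QueryEvents.before_compile hook that _compile_context() now consults; the Widget mapping and its ``deleted`` flag are assumptions made up for this sketch and are not introduced by the patch::

    from sqlalchemy import Boolean, Column, Integer, String, create_engine, event
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Query, Session

    Base = declarative_base()

    class Widget(Base):
        __tablename__ = 'widget'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        deleted = Column(Boolean, default=False)

    @event.listens_for(Query, "before_compile", retval=True)
    def no_deleted(query):
        # A listener registered with retval=True may return a replacement
        # Query; that corresponds to the "if new_query is not None" branch.
        # enable_assertions(False) permits adding criteria at compile time.
        for desc in query.column_descriptions:
            if desc['type'] is Widget:
                return query.enable_assertions(False).filter(
                    Widget.deleted == False)  # noqa: E712
        return None

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)
    session.add_all([Widget(name='keep'), Widget(name='gone', deleted=True)])
    session.commit()

    print(session.query(Widget).all())  # only the non-deleted widget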
""" - for (ext_info, adapter) in self._mapper_adapter_map.values(): + + for (ext_info, adapter) in set(self._mapper_adapter_map.values()): if ext_info in self._join_entities: continue single_crit = ext_info.mapper._single_table_criterion @@ -2917,14 +3374,15 @@ def _adjust_for_single_inheritance(self, context): single_crit = adapter.traverse(single_crit) single_crit = self._adapt_clause(single_crit, False, False) context.whereclause = sql.and_( - sql.True_._ifnone(context.whereclause), - single_crit) + sql.True_._ifnone(context.whereclause), + single_crit) def __str__(self): return str(self._compile_context().statement) from ..sql.selectable import ForUpdateArg + class LockmodeArg(ForUpdateArg): @classmethod def parse_legacy_query(self, mode): @@ -2941,10 +3399,11 @@ def parse_legacy_query(self, mode): read = False else: raise sa_exc.ArgumentError( - "Unknown with_lockmode argument: %r" % mode) + "Unknown with_lockmode argument: %r" % mode) return LockmodeArg(read=read, nowait=nowait) + class _QueryEntity(object): """represent an entity column returned within a Query result.""" @@ -2952,7 +3411,7 @@ def __new__(cls, *args, **kwargs): if cls is _QueryEntity: entity = args[1] if not isinstance(entity, util.string_types) and \ - _is_mapped_class(entity): + _is_mapped_class(entity): cls = _MapperEntity elif isinstance(entity, Bundle): cls = _BundleEntity @@ -2986,17 +3445,16 @@ def setup_entity(self, ext_info, aliased_adapter): self.is_aliased_class = ext_info.is_aliased_class self._with_polymorphic = ext_info.with_polymorphic_mappers self._polymorphic_discriminator = \ - ext_info.polymorphic_on + ext_info.polymorphic_on self.entity_zero = ext_info if ext_info.is_aliased_class: self._label_name = self.entity_zero.name else: self._label_name = self.mapper.class_.__name__ self.path = self.entity_zero._path_registry - self.custom_rows = bool(self.mapper.dispatch.append_result) def set_with_polymorphic(self, query, cls_or_mappers, - selectable, polymorphic_on): + selectable, polymorphic_on): """Receive an update from a call to query.with_polymorphic(). Note the newer style of using a free standing with_polymporphic() @@ -3007,23 +3465,23 @@ def set_with_polymorphic(self, query, cls_or_mappers, if self.is_aliased_class: # TODO: invalidrequest ? 
raise NotImplementedError( - "Can't use with_polymorphic() against " - "an Aliased object" - ) + "Can't use with_polymorphic() against " + "an Aliased object" + ) if cls_or_mappers is None: query._reset_polymorphic_adapter(self.mapper) return mappers, from_obj = self.mapper._with_polymorphic_args( - cls_or_mappers, selectable) + cls_or_mappers, selectable) self._with_polymorphic = mappers self._polymorphic_discriminator = polymorphic_on self.selectable = from_obj - query._mapper_loads_polymorphically_with(self.mapper, - sql_util.ColumnAdapter(from_obj, - self.mapper._equivalent_columns)) + query._mapper_loads_polymorphically_with( + self.mapper, sql_util.ColumnAdapter( + from_obj, self.mapper._equivalent_columns)) filter_fn = id @@ -3072,7 +3530,7 @@ def _get_entity_clauses(self, query, context): return ret - def row_processor(self, query, context, custom_rows): + def row_processor(self, query, context, result): adapter = self._get_entity_clauses(query, context) if context.adapter and adapter: @@ -3089,30 +3547,28 @@ def row_processor(self, query, context, custom_rows): self.mapper._equivalent_columns) if query._primary_entity is self: - _instance = loading.instance_processor( - self.mapper, - context, - self.path, - adapter, - only_load_props=query._only_load_props, - refresh_state=context.refresh_state, - polymorphic_discriminator=self._polymorphic_discriminator - ) + only_load_props = query._only_load_props + refresh_state = context.refresh_state else: - _instance = loading.instance_processor( - self.mapper, - context, - self.path, - adapter, - polymorphic_discriminator=self._polymorphic_discriminator - ) + only_load_props = refresh_state = None + + _instance = loading._instance_processor( + self.mapper, + context, + result, + self.path, + adapter, + only_load_props=only_load_props, + refresh_state=refresh_state, + polymorphic_discriminator=self._polymorphic_discriminator + ) return _instance, self._label_name def setup_context(self, query, context): adapter = self._get_entity_clauses(query, context) - #if self._adapted_selectable is None: + # if self._adapted_selectable is None: context.froms += (self.selectable,) if context.order_by is False and self.mapper.order_by: @@ -3121,45 +3577,24 @@ def setup_context(self, query, context): # apply adaptation to the mapper's order_by if needed. 
if adapter: context.order_by = adapter.adapt_list( - util.to_list( - context.order_by - ) - ) - - if self._with_polymorphic: - poly_properties = self.mapper._iterate_polymorphic_properties( - self._with_polymorphic) - else: - poly_properties = self.mapper._polymorphic_properties - - for value in poly_properties: - if query._only_load_props and \ - value.key not in query._only_load_props: - continue - value.setup( - context, - self, - self.path, - adapter, - only_load_props=query._only_load_props, - column_collection=context.primary_columns - ) - - if self._polymorphic_discriminator is not None and \ - self._polymorphic_discriminator \ - is not self.mapper.polymorphic_on: + util.to_list( + context.order_by + ) + ) - if adapter: - pd = adapter.columns[self._polymorphic_discriminator] - else: - pd = self._polymorphic_discriminator - context.primary_columns.append(pd) + loading._setup_entity_query( + context, self.mapper, self, + self.path, adapter, context.primary_columns, + with_polymorphic=self._with_polymorphic, + only_load_props=query._only_load_props, + polymorphic_discriminator=self._polymorphic_discriminator) def __str__(self): return str(self.mapper) + @inspection._self_inspects -class Bundle(object): +class Bundle(InspectionAttr): """A grouping of SQL expressions that are returned by a :class:`.Query` under one namespace. @@ -3182,6 +3617,12 @@ class Bundle(object): """If True, queries for a single Bundle will be returned as a single entity, rather than an element within a keyed tuple.""" + is_clause_element = False + + is_mapper = False + + is_aliased_class = False + def __init__(self, name, *exprs, **kw): """Construct a new :class:`.Bundle`. @@ -3189,7 +3630,8 @@ def __init__(self, name, *exprs, **kw): bn = Bundle("mybundle", MyClass.x, MyClass.y) - for row in session.query(bn).filter(bn.c.x == 5).filter(bn.c.y == 4): + for row in session.query(bn).filter( + bn.c.x == 5).filter(bn.c.y == 4): print(row.mybundle.x, row.mybundle.y) :param name: name of the bundle. @@ -3203,7 +3645,7 @@ def __init__(self, name, *exprs, **kw): self.exprs = exprs self.c = self.columns = ColumnCollection() self.columns.update((getattr(col, "key", col._label), col) - for col in exprs) + for col in exprs) self.single_entity = kw.pop('single_entity', self.single_entity) columns = None @@ -3222,7 +3664,8 @@ def __init__(self, name, *exprs, **kw): Bundle('b3', MyClass.x, MyClass.y) ) - q = sess.query(b1).filter(b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9) + q = sess.query(b1).filter( + b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9) .. seealso:: @@ -3262,8 +3705,10 @@ def create_row_processor(self, query, procs, labels): :ref:`bundles` - includes an example of subclassing. 
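
The subclassing hook that :ref:`bundles` refers to can be sketched briefly; note that the single-argument ``proc(row)`` signature matches the row processor introduced by this patch just below, and ``MyClass`` is only a placeholder::

    from sqlalchemy.orm import Bundle

    class DictBundle(Bundle):
        """Return bundle values as plain dicts instead of named tuples."""

        def create_row_processor(self, query, procs, labels):
            def proc(row):
                return dict(zip(labels, (p(row) for p in procs)))
            return proc

    # usage sketch: the bundle key yields a dict per row
    # bn = DictBundle('mybundle', MyClass.x, MyClass.y)
    # for row in session.query(bn).filter(bn.c.x == 5):
    #     print(row.mybundle['x'], row.mybundle['y'])
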
""" - def proc(row, result): - return util.KeyedTuple([proc(row, None) for proc in procs], labels) + keyed_tuple = util.lightweight_named_tuple('result', labels) + + def proc(row): + return keyed_tuple([proc(row) for proc in procs]) return proc @@ -3288,8 +3733,6 @@ def __init__(self, query, bundle, setup_entities=True): self.supports_single_entity = self.bundle.single_entity - custom_rows = False - @property def entity_zero(self): for ent in self._entities: @@ -3315,9 +3758,9 @@ def entity_zero_or_selectable(self): def adapt_to_selectable(self, query, sel): c = _BundleEntity(query, self.bundle, setup_entities=False) - #c._label_name = self._label_name - #c.entity_zero = self.entity_zero - #c.entities = self.entities + # c._label_name = self._label_name + # c.entity_zero = self.entity_zero + # c.entities = self.entities for ent in self._entities: ent.adapt_to_selectable(c, sel) @@ -3330,52 +3773,64 @@ def setup_context(self, query, context): for ent in self._entities: ent.setup_context(query, context) - def row_processor(self, query, context, custom_rows): + def row_processor(self, query, context, result): procs, labels = zip( - *[ent.row_processor(query, context, custom_rows) - for ent in self._entities] - ) + *[ent.row_processor(query, context, result) + for ent in self._entities] + ) proc = self.bundle.create_row_processor(query, procs, labels) return proc, self._label_name + class _ColumnEntity(_QueryEntity): """Column/expression based entity.""" def __init__(self, query, column, namespace=None): self.expr = column self.namespace = namespace + search_entities = True + check_column = False if isinstance(column, util.string_types): column = sql.literal_column(column) self._label_name = column.name + search_entities = False + check_column = True + _entity = None elif isinstance(column, ( - attributes.QueryableAttribute, - interfaces.PropComparator - )): + attributes.QueryableAttribute, + interfaces.PropComparator + )): + _entity = getattr(column, '_parententity', None) + if _entity is not None: + search_entities = False self._label_name = column.key column = column._query_clause_element() - else: - self._label_name = getattr(column, 'key', None) - - if not isinstance(column, expression.ColumnElement) and \ - hasattr(column, '_select_iterable'): - for c in column._select_iterable: - if c is column: - break - _ColumnEntity(query, c, namespace=column) - else: + check_column = True + if isinstance(column, Bundle): + _BundleEntity(query, column) return - elif isinstance(column, Bundle): - _BundleEntity(query, column) - return if not isinstance(column, sql.ColumnElement): + if hasattr(column, '_select_iterable'): + # break out an object like Table into + # individual columns + for c in column._select_iterable: + if c is column: + break + _ColumnEntity(query, c, namespace=column) + else: + return + raise sa_exc.InvalidRequestError( "SQL expression, column, or mapped entity " "expected - got '%r'" % (column, ) ) + elif not check_column: + self._label_name = getattr(column, 'key', None) + search_entities = True self.type = type_ = column.type if type_.hashable: @@ -3406,22 +3861,39 @@ def __init__(self, query, column, namespace=None): # leaking out their entities into the main select construct self.actual_froms = actual_froms = set(column._from_objects) - self.entities = util.OrderedSet( - elem._annotations['parententity'] - for elem in visitors.iterate(column, {}) - if 'parententity' in elem._annotations - and actual_froms.intersection(elem._from_objects) - ) - - if self.entities: - 
self.entity_zero = list(self.entities)[0] - elif self.namespace is not None: - self.entity_zero = self.namespace + if not search_entities: + self.entity_zero = _entity + if _entity: + self.entities = [_entity] + else: + self.entities = [] + self._from_entities = set(self.entities) else: - self.entity_zero = None + all_elements = [ + elem for elem in visitors.iterate(column, {}) + if 'parententity' in elem._annotations + ] + + self.entities = util.unique_list([ + elem._annotations['parententity'] + for elem in all_elements + if 'parententity' in elem._annotations + ]) + + self._from_entities = set([ + elem._annotations['parententity'] + for elem in all_elements + if 'parententity' in elem._annotations + and actual_froms.intersection(elem._from_objects) + ]) + if self.entities: + self.entity_zero = self.entities[0] + elif self.namespace is not None: + self.entity_zero = self.namespace + else: + self.entity_zero = None supports_single_entity = False - custom_rows = False @property def entity_zero_or_selectable(self): @@ -3441,7 +3913,9 @@ def adapt_to_selectable(self, query, sel): def setup_entity(self, ext_info, aliased_adapter): if 'selectable' not in self.__dict__: self.selectable = ext_info.selectable - self.froms.add(ext_info.selectable) + + if self.actual_froms.intersection(ext_info.selectable._from_objects): + self.froms.add(ext_info.selectable) def corresponds_to(self, entity): # TODO: just returning False here, @@ -3453,45 +3927,48 @@ def corresponds_to(self, entity): return entity is self.entity_zero else: return not _is_aliased_class(self.entity_zero) and \ - entity.common_parent(self.entity_zero) - - def _resolve_expr_against_query_aliases(self, query, expr, context): - return query._adapt_clause(expr, False, True) + entity.common_parent(self.entity_zero) - def row_processor(self, query, context, custom_rows): - column = self._resolve_expr_against_query_aliases( - query, self.column, context) + def row_processor(self, query, context, result): + if ('fetch_column', self) in context.attributes: + column = context.attributes[('fetch_column', self)] + else: + column = query._adapt_clause(self.column, False, True) if context.adapter: column = context.adapter.columns[column] - def proc(row, result): - return row[column] - - return proc, self._label_name + getter = result._getter(column) + return getter, self._label_name def setup_context(self, query, context): - column = self._resolve_expr_against_query_aliases( - query, self.column, context) + column = query._adapt_clause(self.column, False, True) context.froms += tuple(self.froms) context.primary_columns.append(column) + context.attributes[('fetch_column', self)] = column + def __str__(self): return str(self.column) class QueryContext(object): - multi_row_eager_loaders = False - adapter = None - froms = () - for_update = None + __slots__ = ( + 'multi_row_eager_loaders', 'adapter', 'froms', 'for_update', + 'query', 'session', 'autoflush', 'populate_existing', + 'invoke_all_eagers', 'version_check', 'refresh_state', + 'primary_columns', 'secondary_columns', 'eager_order_by', + 'eager_joins', 'create_eager_joins', 'propagate_options', + 'attributes', 'statement', 'from_clause', 'whereclause', + 'order_by', 'labels', '_for_update_arg', 'runid', 'partials' + ) def __init__(self, query): if query._statement is not None: if isinstance(query._statement, expression.SelectBase) and \ - not query._statement._textual and \ - not query._statement.use_labels: + not query._statement._textual and \ + not query._statement.use_labels: self.statement = 
query._statement.apply_labels() else: self.statement = query._statement @@ -3501,8 +3978,13 @@ def __init__(self, query): self.whereclause = query._criterion self.order_by = query._order_by + self.multi_row_eager_loaders = False + self.adapter = None + self.froms = () + self.for_update = None self.query = query self.session = query.session + self.autoflush = query._autoflush self.populate_existing = query._populate_existing self.invoke_all_eagers = query._invoke_all_eagers self.version_check = query._version_check @@ -3513,7 +3995,7 @@ def __init__(self, query): self.eager_joins = {} self.create_eager_joins = [] self.propagate_options = set(o for o in query._with_options if - o.propagate_to_loaders) + o.propagate_to_loaders) self.attributes = query._attributes.copy() @@ -3560,5 +4042,3 @@ def process_query(self, query): else: alias = self.alias query._from_obj_alias = sql_util.ColumnAdapter(alias) - - diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index 311fba4786..c58dd98fb6 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -1,5 +1,6 @@ # orm/relationships.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,21 +13,25 @@ and `secondaryjoin` aspects of :func:`.relationship`. """ - +from __future__ import absolute_import from .. import sql, util, exc as sa_exc, schema, log +import weakref from .util import CascadeOptions, _orm_annotate, _orm_deannotate from . import dependency from . import attributes from ..sql.util import ( ClauseAdapter, join_condition, _shallow_annotate, visit_binary_product, - _deep_deannotate, selectables_overlap - ) + _deep_deannotate, selectables_overlap, adapt_criterion_to_null +) from ..sql import operators, expression, visitors -from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY, StrategizedProperty, PropComparator +from .interfaces import (MANYTOMANY, MANYTOONE, ONETOMANY, + StrategizedProperty, PropComparator) from ..inspection import inspect from . 
import mapper as mapperlib +import collections + def remote(expr): """Annotate a portion of a primaryjoin expression @@ -45,7 +50,7 @@ def remote(expr): """ return _annotate_columns(expression._clause_element_as_expr(expr), - {"remote": True}) + {"remote": True}) def foreign(expr): @@ -66,7 +71,7 @@ def foreign(expr): """ return _annotate_columns(expression._clause_element_as_expr(expr), - {"foreign": True}) + {"foreign": True}) @log.class_logger @@ -88,33 +93,35 @@ class RelationshipProperty(StrategizedProperty): _dependency_processor = None def __init__(self, argument, - secondary=None, primaryjoin=None, - secondaryjoin=None, - foreign_keys=None, - uselist=None, - order_by=False, - backref=None, - back_populates=None, - post_update=False, - cascade=False, extension=None, - viewonly=False, lazy=True, - collection_class=None, passive_deletes=False, - passive_updates=True, remote_side=None, - enable_typechecks=True, join_depth=None, - comparator_factory=None, - single_parent=False, innerjoin=False, - distinct_target_key=None, - doc=None, - active_history=False, - cascade_backrefs=True, - load_on_pending=False, - strategy_class=None, _local_remote_pairs=None, - query_class=None, - info=None): + secondary=None, primaryjoin=None, + secondaryjoin=None, + foreign_keys=None, + uselist=None, + order_by=False, + backref=None, + back_populates=None, + post_update=False, + cascade=False, extension=None, + viewonly=False, lazy=True, + collection_class=None, passive_deletes=False, + passive_updates=True, remote_side=None, + enable_typechecks=True, join_depth=None, + comparator_factory=None, + single_parent=False, innerjoin=False, + distinct_target_key=None, + doc=None, + active_history=False, + cascade_backrefs=True, + load_on_pending=False, + bake_queries=True, + strategy_class=None, _local_remote_pairs=None, + query_class=None, + info=None): """Provide a relationship between two mapped classes. - This corresponds to a parent-child or associative table relationship. The - constructed class is an instance of :class:`.RelationshipProperty`. + This corresponds to a parent-child or associative table relationship. + The constructed class is an instance of + :class:`.RelationshipProperty`. A typical :func:`.relationship`, used in a classical mapping:: @@ -125,10 +132,11 @@ def __init__(self, argument, Some arguments accepted by :func:`.relationship` optionally accept a callable function, which when called produces the desired value. The callable is invoked by the parent :class:`.Mapper` at "mapper - initialization" time, which happens only when mappers are first used, and - is assumed to be after all mappings have been constructed. This can be - used to resolve order-of-declaration and other dependency issues, such as - if ``Child`` is declared below ``Parent`` in the same file:: + initialization" time, which happens only when mappers are first used, + and is assumed to be after all mappings have been constructed. This + can be used to resolve order-of-declaration and other dependency + issues, such as if ``Child`` is declared below ``Parent`` in the same + file:: mapper(Parent, properties={ "children":relationship(lambda: Child, @@ -136,12 +144,12 @@ def __init__(self, argument, }) When using the :ref:`declarative_toplevel` extension, the Declarative - initializer allows string arguments to be passed to :func:`.relationship`. - These string arguments are converted into callables that evaluate - the string as Python code, using the Declarative - class-registry as a namespace. 
This allows the lookup of related - classes to be automatic via their string name, and removes the need to - import related classes at all into the local module space:: + initializer allows string arguments to be passed to + :func:`.relationship`. These string arguments are converted into + callables that evaluate the string as Python code, using the + Declarative class-registry as a namespace. This allows the lookup of + related classes to be automatic via their string name, and removes the + need to import related classes at all into the local module space:: from sqlalchemy.ext.declarative import declarative_base @@ -154,18 +162,18 @@ class Parent(Base): .. seealso:: - :ref:`relationship_config_toplevel` - Full introductory and reference - documentation for :func:`.relationship`. + :ref:`relationship_config_toplevel` - Full introductory and + reference documentation for :func:`.relationship`. :ref:`orm_tutorial_relationship` - ORM tutorial introduction. :param argument: - a mapped class, or actual :class:`.Mapper` instance, representing the - target of the relationship. + a mapped class, or actual :class:`.Mapper` instance, representing + the target of the relationship. - :paramref:`~.relationship.argument` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a - Python-evaluable string when using Declarative. + :paramref:`~.relationship.argument` may also be passed as a callable + function which is evaluated at mapper initialization time, and may + be passed as a Python-evaluable string when using Declarative. .. seealso:: @@ -185,35 +193,37 @@ class Parent(Base): present in the :class:`.MetaData` collection associated with the parent-mapped :class:`.Table`. - The :paramref:`~.relationship.secondary` keyword argument is typically - applied in the case where the intermediary :class:`.Table` is not - otherwise exprssed in any direct class mapping. If the "secondary" table - is also explicitly mapped elsewhere - (e.g. as in :ref:`association_pattern`), one should consider applying - the :paramref:`~.relationship.viewonly` flag so that this :func:`.relationship` - is not used for persistence operations which may conflict with those - of the association object pattern. + The :paramref:`~.relationship.secondary` keyword argument is + typically applied in the case where the intermediary :class:`.Table` + is not otherwise expressed in any direct class mapping. If the + "secondary" table is also explicitly mapped elsewhere (e.g. as in + :ref:`association_pattern`), one should consider applying the + :paramref:`~.relationship.viewonly` flag so that this + :func:`.relationship` is not used for persistence operations which + may conflict with those of the association object pattern. .. seealso:: - :ref:`relationships_many_to_many` - Reference example of "many to many". + :ref:`relationships_many_to_many` - Reference example of "many + to many". :ref:`orm_tutorial_many_to_many` - ORM tutorial introduction to many-to-many relationships. - :ref:`self_referential_many_to_many` - Specifics on using many-to-many - in a self-referential case. + :ref:`self_referential_many_to_many` - Specifics on using + many-to-many in a self-referential case. :ref:`declarative_many_to_many` - Additional options when using Declarative. - :ref:`association_pattern` - an alternative to :paramref:`~.relationship.secondary` - when composing association table relationships, allowing additional - attributes to be specified on the association table. 
+ :ref:`association_pattern` - an alternative to + :paramref:`~.relationship.secondary` when composing association + table relationships, allowing additional attributes to be + specified on the association table. - :ref:`composite_secondary_join` - a lesser-used pattern which in some - cases can enable complex :func:`.relationship` SQL conditions - to be used. + :ref:`composite_secondary_join` - a lesser-used pattern which + in some cases can enable complex :func:`.relationship` SQL + conditions to be used. .. versionadded:: 0.9.2 :paramref:`~.relationship.secondary` works more effectively when referring to a :class:`.Join` instance. @@ -249,11 +259,13 @@ class Parent(Base): :param back_populates: - Takes a string name and has the same meaning as :paramref:`~.relationship.backref`, - except the complementing property is **not** created automatically, - and instead must be configured explicitly on the other mapper. The - complementing property should also indicate :paramref:`~.relationship.back_populates` - to this relationship to ensure proper functioning. + Takes a string name and has the same meaning as + :paramref:`~.relationship.backref`, except the complementing + property is **not** created automatically, and instead must be + configured explicitly on the other mapper. The complementing + property should also indicate + :paramref:`~.relationship.back_populates` to this relationship to + ensure proper functioning. .. seealso:: @@ -263,6 +275,31 @@ class Parent(Base): :paramref:`~.relationship.backref` - alternative form of backref specification. + :param bake_queries=True: + Use the :class:`.BakedQuery` cache to cache the construction of SQL + used in lazy loads, when the :func:`.bake_lazy_loaders` function has + first been called. Defaults to True and is intended to provide an + "opt out" flag per-relationship when the baked query cache system is + in use. + + .. warning:: + + This flag **only** has an effect when the application-wide + :func:`.bake_lazy_loaders` function has been called. It + defaults to True so is an "opt out" flag. + + Setting this flag to False when baked queries are otherwise in + use might be to reduce + ORM memory use for this :func:`.relationship`, or to work around + unresolved stability issues observed within the baked query + cache system. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :ref:`baked_toplevel` + :param cascade: a comma-separated list of cascade rules which determines how Session operations should be "cascaded" from parent to child. @@ -307,8 +344,9 @@ class Parent(Base): examples. :param comparator_factory: - a class which extends :class:`.RelationshipProperty.Comparator` which - provides custom SQL clause generation for comparison operations. + a class which extends :class:`.RelationshipProperty.Comparator` + which provides custom SQL clause generation for comparison + operations. .. seealso:: @@ -323,20 +361,21 @@ class Parent(Base): keyword to the innermost SELECT statement. When left as ``None``, the DISTINCT keyword will be applied in those cases when the target columns do not comprise the full primary key of the target table. - When set to ``True``, the DISTINCT keyword is applied to the innermost - SELECT unconditionally. + When set to ``True``, the DISTINCT keyword is applied to the + innermost SELECT unconditionally. It may be desirable to set this flag to False when the DISTINCT is reducing performance of the innermost subquery beyond that of what duplicate innermost rows may be causing. - .. 
versionadded:: 0.8.3 - :paramref:`~.relationship.distinct_target_key` - allows the + .. versionadded:: 0.8.3 - + :paramref:`~.relationship.distinct_target_key` allows the subquery eager loader to apply a DISTINCT modifier to the innermost SELECT. - .. versionchanged:: 0.9.0 - :paramref:`~.relationship.distinct_target_key` - now defaults to ``None``, so that the feature enables itself automatically for + .. versionchanged:: 0.9.0 - + :paramref:`~.relationship.distinct_target_key` now defaults to + ``None``, so that the feature enables itself automatically for those cases where the innermost query targets a non-unique key. @@ -385,8 +424,9 @@ class Parent(Base): .. versionchanged:: 0.8 A multiple-foreign key join ambiguity can be resolved by - setting the :paramref:`~.relationship.foreign_keys` parameter alone, without the - need to explicitly set :paramref:`~.relationship.primaryjoin` as well. + setting the :paramref:`~.relationship.foreign_keys` + parameter alone, without the need to explicitly set + :paramref:`~.relationship.primaryjoin` as well. 2. The :class:`.Table` being mapped does not actually have :class:`.ForeignKey` or :class:`.ForeignKeyConstraint` @@ -394,10 +434,11 @@ class Parent(Base): was reflected from a database that does not support foreign key reflection (MySQL MyISAM). - 3. The :paramref:`~.relationship.primaryjoin` argument is used to construct a non-standard - join condition, which makes use of columns or expressions that do - not normally refer to their "parent" column, such as a join condition - expressed by a complex comparison using a SQL function. + 3. The :paramref:`~.relationship.primaryjoin` argument is used to + construct a non-standard join condition, which makes use of + columns or expressions that do not normally refer to their + "parent" column, such as a join condition expressed by a + complex comparison using a SQL function. The :func:`.relationship` construct will raise informative error messages that suggest the use of the @@ -407,9 +448,10 @@ class Parent(Base): :paramref:`~.relationship.foreign_keys` parameter is usually not needed. - :paramref:`~.relationship.foreign_keys` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a - Python-evaluable string when using Declarative. + :paramref:`~.relationship.foreign_keys` may also be passed as a + callable function which is evaluated at mapper initialization time, + and may be passed as a Python-evaluable string when using + Declarative. .. seealso:: @@ -417,14 +459,16 @@ class Parent(Base): :ref:`relationship_custom_foreign` - :func:`.foreign` - allows direct annotation of the "foreign" columns - within a :paramref:`~.relationship.primaryjoin` condition. + :func:`.foreign` - allows direct annotation of the "foreign" + columns within a :paramref:`~.relationship.primaryjoin` condition. .. versionadded:: 0.8 The :func:`.foreign` annotation can also be applied - directly to the :paramref:`~.relationship.primaryjoin` expression, which is an alternate, - more specific system of describing which columns in a particular - :paramref:`~.relationship.primaryjoin` should be considered "foreign". + directly to the :paramref:`~.relationship.primaryjoin` + expression, which is an alternate, more specific system of + describing which columns in a particular + :paramref:`~.relationship.primaryjoin` should be considered + "foreign". :param info: Optional data dictionary which will be populated into the :attr:`.MapperProperty.info` attribute of this object. 
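
The multiple-foreign-key ambiguity that :paramref:`~.relationship.foreign_keys` resolves (case 1 above) is easiest to see in a small sketch; the class and column names here are illustrative only, not part of this patch::

    from sqlalchemy import Column, Integer, String, ForeignKey
    from sqlalchemy.orm import relationship
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        email = Column(String(100))

    class Customer(Base):
        __tablename__ = 'customer'
        id = Column(Integer, primary_key=True)
        billing_address_id = Column(Integer, ForeignKey('address.id'))
        shipping_address_id = Column(Integer, ForeignKey('address.id'))

        # two columns reference address.id, so each relationship() must be
        # told which column is "foreign" for its join condition
        billing_address = relationship(
            Address, foreign_keys=[billing_address_id])
        shipping_address = relationship(
            Address, foreign_keys=[shipping_address_id])
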
@@ -438,25 +482,22 @@ class Parent(Base): generally perform better than outer joins. This flag can be set to ``True`` when the relationship references an - object via many-to-one using local foreign keys that are not nullable, - or when the reference is one-to-one or a collection that is guaranteed - to have one or at least one entry. - - If the joined-eager load is chained onto an existing LEFT OUTER JOIN, - ``innerjoin=True`` will be bypassed and the join will continue to - chain as LEFT OUTER JOIN so that the results don't change. As an alternative, - specify the value ``"nested"``. This will instead nest the join - on the right side, e.g. using the form "a LEFT OUTER JOIN (b JOIN c)". + object via many-to-one using local foreign keys that are not + nullable, or when the reference is one-to-one or a collection that + is guaranteed to have one or at least one entry. - .. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to support - nesting of eager "inner" joins. + The option supports the same "nested" and "unnested" options as + that of :paramref:`.joinedload.innerjoin`. See that flag + for details on nested / unnested behaviors. .. seealso:: + :paramref:`.joinedload.innerjoin` - the option as specified by + loader option, including detail on nesting behavior. + :ref:`what_kind_of_loading` - Discussion of some details of various loader options. - :paramref:`.joinedload.innerjoin` - loader option version :param join_depth: when non-``None``, an integer value indicating how many levels @@ -477,8 +518,8 @@ class Parent(Base): how the related items should be loaded. Default value is ``select``. Values include: - * ``select`` - items should be loaded lazily when the property is first - accessed, using a separate SELECT statement, or identity map + * ``select`` - items should be loaded lazily when the property is + first accessed, using a separate SELECT statement, or identity map fetch for simple many-to-one references. * ``immediate`` - items should be loaded as the parents are loaded, @@ -491,8 +532,9 @@ class Parent(Base): :paramref:`~.relationship.innerjoin` parameter. * ``subquery`` - items should be loaded "eagerly" as the parents are - loaded, using one additional SQL statement, which issues a JOIN to a - subquery of the original statement, for each collection requested. + loaded, using one additional SQL statement, which issues a JOIN to + a subquery of the original statement, for each collection + requested. * ``noload`` - no loading should occur at any time. This is to support "write-only" attributes, or attributes which are @@ -512,7 +554,7 @@ class Parent(Base): .. seealso:: - :doc:`/orm/loading` - Full documentation on relationship loader + :doc:`/orm/loading_relationships` - Full documentation on relationship loader configuration. :ref:`dynamic_relationship` - detail on the ``dynamic`` option. @@ -521,35 +563,35 @@ class Parent(Base): Indicates loading behavior for transient or pending parent objects. When set to ``True``, causes the lazy-loader to - issue a query for a parent object that is not persistent, meaning it has - never been flushed. This may take effect for a pending object when - autoflush is disabled, or for a transient object that has been + issue a query for a parent object that is not persistent, meaning it + has never been flushed. This may take effect for a pending object + when autoflush is disabled, or for a transient object that has been "attached" to a :class:`.Session` but is not part of its pending collection. 
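
As an aside on the loader-strategy values listed under :paramref:`~.relationship.lazy` a little earlier, a hedged configuration sketch (``Parent`` and ``Child`` are stand-ins, not names taken from this patch)::

    from sqlalchemy import Column, Integer, ForeignKey
    from sqlalchemy.orm import relationship
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)

        # default: emit a separate SELECT on first attribute access
        children = relationship("Child", lazy="select")

        # eager alternatives: a LEFT OUTER JOIN in the same statement, or a
        # second statement that JOINs against a subquery of the original;
        # viewonly avoids these extra relationships participating in flush
        children_joined = relationship(
            "Child", lazy="joined", viewonly=True)
        children_subquery = relationship(
            "Child", lazy="subquery", viewonly=True)

    class Child(Base):
        __tablename__ = 'child'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parent.id'))
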
- The :paramref:`~.relationship.load_on_pending` flag does not improve behavior - when the ORM is used normally - object references should be constructed - at the object level, not at the foreign key level, so that they - are present in an ordinary way before a flush proceeds. This flag - is not not intended for general use. + The :paramref:`~.relationship.load_on_pending` flag does not improve + behavior when the ORM is used normally - object references should be + constructed at the object level, not at the foreign key level, so + that they are present in an ordinary way before a flush proceeds. + This flag is not not intended for general use. .. seealso:: - :meth:`.Session.enable_relationship_loading` - this method establishes - "load on pending" behavior for the whole object, and also allows - loading on objects that remain transient or detached. + :meth:`.Session.enable_relationship_loading` - this method + establishes "load on pending" behavior for the whole object, and + also allows loading on objects that remain transient or + detached. :param order_by: indicates the ordering that should be applied when loading these - items. :paramref:`~.relationship.order_by` is expected to refer to one - of the :class:`.Column` - objects to which the target class is mapped, or - the attribute itself bound to the target class which refers - to the column. + items. :paramref:`~.relationship.order_by` is expected to refer to + one of the :class:`.Column` objects to which the target class is + mapped, or the attribute itself bound to the target class which + refers to the column. - :paramref:`~.relationship.order_by` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a - Python-evaluable string when using Declarative. + :paramref:`~.relationship.order_by` may also be passed as a callable + function which is evaluated at mapper initialization time, and may + be passed as a Python-evaluable string when using Declarative. :param passive_deletes=False: Indicates loading behavior during delete operations. @@ -578,30 +620,26 @@ class Parent(Base): and examples. :param passive_updates=True: - Indicates loading and INSERT/UPDATE/DELETE behavior when the - source of a foreign key value changes (i.e. an "on update" - cascade), which are typically the primary key columns of the - source row. + Indicates the persistence behavior to take when a referenced + primary key value changes in place, indicating that the referencing + foreign key columns will also need their value changed. - When True, it is assumed that ON UPDATE CASCADE is configured on + When True, it is assumed that ``ON UPDATE CASCADE`` is configured on the foreign key in the database, and that the database will handle propagation of an UPDATE from a source column to - dependent rows. Note that with databases which enforce - referential integrity (i.e. PostgreSQL, MySQL with InnoDB tables), - ON UPDATE CASCADE is required for this operation. The - relationship() will update the value of the attribute on related - items which are locally present in the session during a flush. - - When False, it is assumed that the database does not enforce - referential integrity and will not be issuing its own CASCADE - operation for an update. The relationship() will issue the - appropriate UPDATE statements to the database in response to the - change of a referenced key, and items locally present in the - session during a flush will also be refreshed. 
- - This flag should probably be set to False if primary key changes - are expected and the database in use doesn't support CASCADE - (i.e. SQLite, MySQL MyISAM tables). + dependent rows. When False, the SQLAlchemy :func:`.relationship` + construct will attempt to emit its own UPDATE statements to + modify related targets. However note that SQLAlchemy **cannot** + emit an UPDATE for more than one level of cascade. Also, + setting this flag to False is not compatible in the case where + the database is in fact enforcing referential integrity, unless + those constraints are explicitly "deferred", if the target backend + supports it. + + It is highly advised that an application which is employing + mutable primary keys keeps ``passive_updates`` set to True, + and instead uses the referential integrity features of the database + itself in order to handle the change efficiently and fully. .. seealso:: @@ -638,12 +676,13 @@ class Parent(Base): join of this child object against the parent object, or in a many-to-many relationship the join of the primary object to the association table. By default, this value is computed based on the - foreign key relationships of the parent and child tables (or association - table). + foreign key relationships of the parent and child tables (or + association table). - :paramref:`~.relationship.primaryjoin` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a - Python-evaluable string when using Declarative. + :paramref:`~.relationship.primaryjoin` may also be passed as a + callable function which is evaluated at mapper initialization time, + and may be passed as a Python-evaluable string when using + Declarative. .. seealso:: @@ -653,24 +692,25 @@ class Parent(Base): used for self-referential relationships, indicates the column or list of columns that form the "remote side" of the relationship. - :paramref:`.relationship.remote_side` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a - Python-evaluable string when using Declarative. + :paramref:`.relationship.remote_side` may also be passed as a + callable function which is evaluated at mapper initialization time, + and may be passed as a Python-evaluable string when using + Declarative. .. versionchanged:: 0.8 The :func:`.remote` annotation can also be applied - directly to the ``primaryjoin`` expression, which is an alternate, - more specific system of describing which columns in a particular - ``primaryjoin`` should be considered "remote". + directly to the ``primaryjoin`` expression, which is an + alternate, more specific system of describing which columns in a + particular ``primaryjoin`` should be considered "remote". .. seealso:: - :ref:`self_referential` - in-depth explaination of how + :ref:`self_referential` - in-depth explanation of how :paramref:`~.relationship.remote_side` is used to configure self-referential relationships. - :func:`.remote` - an annotation function that accomplishes the same - purpose as :paramref:`~.relationship.remote_side`, typically + :func:`.remote` - an annotation function that accomplishes the + same purpose as :paramref:`~.relationship.remote_side`, typically when a custom :paramref:`~.relationship.primaryjoin` condition is used. @@ -683,18 +723,19 @@ class Parent(Base): .. seealso:: - :ref:`dynamic_relationship` - Introduction to "dynamic" relationship - loaders. + :ref:`dynamic_relationship` - Introduction to "dynamic" + relationship loaders. 
:param secondaryjoin: a SQL expression that will be used as the join of an association table to the child object. By default, this value is - computed based on the foreign key relationships of the association and - child tables. + computed based on the foreign key relationships of the association + and child tables. - :paramref:`~.relationship.secondaryjoin` may also be passed as a callable function - which is evaluated at mapper initialization time, and may be passed as a - Python-evaluable string when using Declarative. + :paramref:`~.relationship.secondaryjoin` may also be passed as a + callable function which is evaluated at mapper initialization time, + and may be passed as a Python-evaluable string when using + Declarative. .. seealso:: @@ -707,9 +748,9 @@ class Parent(Base): should be treated either as one-to-one or one-to-many. Its usage is optional, except for :func:`.relationship` constructs which are many-to-one or many-to-many and also - specify the ``delete-orphan`` cascade option. The :func:`.relationship` - construct itself will raise an error instructing when this option - is required. + specify the ``delete-orphan`` cascade option. The + :func:`.relationship` construct itself will raise an error + instructing when this option is required. .. seealso:: @@ -724,36 +765,39 @@ class Parent(Base): of the relationship - one to many forms a list, many to one forms a scalar, many to many is a list. If a scalar is desired where normally a list would be present, such as a bi-directional - one-to-one relationship, set :paramref:`~.relationship.uselist` to False. + one-to-one relationship, set :paramref:`~.relationship.uselist` to + False. The :paramref:`~.relationship.uselist` flag is also available on an - existing :func:`.relationship` construct as a read-only attribute, which - can be used to determine if this :func:`.relationship` deals with - collections or scalar attributes:: + existing :func:`.relationship` construct as a read-only attribute, + which can be used to determine if this :func:`.relationship` deals + with collections or scalar attributes:: >>> User.addresses.property.uselist True .. seealso:: - :ref:`relationships_one_to_one` - Introduction to the "one to one" - relationship pattern, which is typically when the + :ref:`relationships_one_to_one` - Introduction to the "one to + one" relationship pattern, which is typically when the :paramref:`~.relationship.uselist` flag is needed. :param viewonly=False: when set to True, the relationship is used only for loading objects, and not for any persistence operation. A :func:`.relationship` which specifies :paramref:`~.relationship.viewonly` can work - with a wider range of SQL operations within the :paramref:`~.relationship.primaryjoin` - condition, including operations that feature the use of - a variety of comparison operators as well as SQL functions such - as :func:`~.sql.expression.cast`. The :paramref:`~.relationship.viewonly` - flag is also of general use when defining any kind of :func:`~.relationship` - that doesn't represent the full set of related objects, to prevent - modifications of the collection from resulting in persistence operations. + with a wider range of SQL operations within the + :paramref:`~.relationship.primaryjoin` condition, including + operations that feature the use of a variety of comparison operators + as well as SQL functions such as :func:`~.sql.expression.cast`. 
The + :paramref:`~.relationship.viewonly` flag is also of general use when + defining any kind of :func:`~.relationship` that doesn't represent + the full set of related objects, to prevent modifications of the + collection from resulting in persistence operations. """ + super(RelationshipProperty, self).__init__() self.uselist = uselist self.argument = argument @@ -780,9 +824,10 @@ class Parent(Base): self.join_depth = join_depth self.local_remote_pairs = _local_remote_pairs self.extension = extension + self.bake_queries = bake_queries self.load_on_pending = load_on_pending self.comparator_factory = comparator_factory or \ - RelationshipProperty.Comparator + RelationshipProperty.Comparator self.comparator = self.comparator_factory(self, None) util.set_creation_order(self) @@ -797,7 +842,7 @@ class Parent(Base): self._reverse_property = set() self.cascade = cascade if cascade is not False \ - else "save-update, merge" + else "save-update, merge" self.order_by = order_by @@ -806,8 +851,8 @@ class Parent(Base): if self.back_populates: if backref: raise sa_exc.ArgumentError( - "backref and back_populates keyword arguments " - "are mutually exclusive") + "backref and back_populates keyword arguments " + "are mutually exclusive") self.backref = None else: self.backref = backref @@ -819,14 +864,14 @@ def instrument_class(self, mapper): comparator=self.comparator_factory(self, mapper), parententity=mapper, doc=self.doc, - ) + ) class Comparator(PropComparator): """Produce boolean, comparison, and other operators for :class:`.RelationshipProperty` attributes. - See the documentation for :class:`.PropComparator` for a brief overview - of ORM level operator definition. + See the documentation for :class:`.PropComparator` for a brief + overview of ORM level operator definition. See also: @@ -844,21 +889,22 @@ class Comparator(PropComparator): _of_type = None - def __init__(self, prop, parentmapper, adapt_to_entity=None, of_type=None): + def __init__( + self, prop, parentmapper, adapt_to_entity=None, of_type=None): """Construction of :class:`.RelationshipProperty.Comparator` is internal to the ORM's attribute mechanics. """ self.prop = prop - self._parentmapper = parentmapper + self._parententity = parentmapper self._adapt_to_entity = adapt_to_entity if of_type: self._of_type = of_type def adapt_to_entity(self, adapt_to_entity): - return self.__class__(self.property, self._parentmapper, + return self.__class__(self.property, self._parententity, adapt_to_entity=adapt_to_entity, - of_type=self._of_type) + of_type=self._of_type) @util.memoized_property def mapper(self): @@ -889,10 +935,10 @@ def __clause_element__(self): of_type = None pj, sj, source, dest, \ - secondary, target_adapter = self.property._create_joins( - source_selectable=adapt_from, - source_polymorphic=True, - of_type=of_type) + secondary, target_adapter = self.property._create_joins( + source_selectable=adapt_from, + source_polymorphic=True, + of_type=of_type) if sj is not None: return pj & sj else: @@ -907,10 +953,10 @@ def of_type(self, cls): """ return RelationshipProperty.Comparator( - self.property, - self._parentmapper, - adapt_to_entity=self._adapt_to_entity, - of_type=cls) + self.property, + self._parententity, + adapt_to_entity=self._adapt_to_entity, + of_type=cls) def in_(self, other): """Produce an IN clause - this is not implemented @@ -918,8 +964,9 @@ def in_(self, other): """ raise NotImplementedError('in_() not yet supported for ' - 'relationships. 
For a simple many-to-one, use ' - 'in_() against the set of foreign key values.') + 'relationships. For a simple ' + 'many-to-one, use in_() against ' + 'the set of foreign key values.') __hash__ = None @@ -965,21 +1012,23 @@ def __eq__(self, other): return ~self._criterion_exists() else: return _orm_annotate(self.property._optimized_compare( - None, adapt_source=self.adapter)) + None, adapt_source=self.adapter)) elif self.property.uselist: - raise sa_exc.InvalidRequestError("Can't compare a colle" - "ction to an object or collection; use " - "contains() to test for membership.") + raise sa_exc.InvalidRequestError( + "Can't compare a collection to an object or collection; " + "use contains() to test for membership.") else: - return _orm_annotate(self.property._optimized_compare(other, - adapt_source=self.adapter)) + return _orm_annotate( + self.property._optimized_compare( + other, adapt_source=self.adapter)) def _criterion_exists(self, criterion=None, **kwargs): if getattr(self, '_of_type', None): info = inspect(self._of_type) target_mapper, to_selectable, is_aliased_class = \ info.mapper, info.selectable, info.is_aliased_class - if self.property._is_self_referential and not is_aliased_class: + if self.property._is_self_referential and not \ + is_aliased_class: to_selectable = to_selectable.alias() single_crit = target_mapper._single_table_criterion @@ -998,9 +1047,10 @@ def _criterion_exists(self, criterion=None, **kwargs): source_selectable = None pj, sj, source, dest, secondary, target_adapter = \ - self.property._create_joins(dest_polymorphic=True, - dest_selectable=to_selectable, - source_selectable=source_selectable) + self.property._create_joins( + dest_polymorphic=True, + dest_selectable=to_selectable, + source_selectable=source_selectable) for k in kwargs: crit = getattr(self.property.mapper.class_, k) == kwargs[k] @@ -1017,7 +1067,8 @@ def _criterion_exists(self, criterion=None, **kwargs): else: j = _orm_annotate(pj, exclude=self.property.remote_side) - if criterion is not None and target_adapter and not is_aliased_class: + if criterion is not None and target_adapter and not \ + is_aliased_class: # limit this adapter to annotated only? criterion = target_adapter.traverse(criterion) @@ -1080,9 +1131,9 @@ def any(self, criterion=None, **kwargs): """ if not self.property.uselist: raise sa_exc.InvalidRequestError( - "'any()' not implemented for scalar " - "attributes. Use has()." - ) + "'any()' not implemented for scalar " + "attributes. Use has()." + ) return self._criterion_exists(criterion, **kwargs) @@ -1116,8 +1167,8 @@ def has(self, criterion=None, **kwargs): """ if self.property.uselist: raise sa_exc.InvalidRequestError( - "'has()' not implemented for collections. " - "Use any().") + "'has()' not implemented for collections. " + "Use any().") return self._criterion_exists(criterion, **kwargs) def contains(self, other, **kwargs): @@ -1178,10 +1229,10 @@ def contains(self, other, **kwargs): """ if not self.property.uselist: raise sa_exc.InvalidRequestError( - "'contains' not implemented for scalar " - "attributes. Use ==") - clause = self.property._optimized_compare(other, - adapt_source=self.adapter) + "'contains' not implemented for scalar " + "attributes. 
Use ==") + clause = self.property._optimized_compare( + other, adapt_source=self.adapter) if self.property.secondaryjoin is not None: clause.negation_clause = \ @@ -1194,9 +1245,15 @@ def __negated_contains_or_equals(self, other): state = attributes.instance_state(other) def state_bindparam(x, state, col): - o = state.obj() # strong ref - return sql.bindparam(x, unique=True, callable_=lambda: \ - self.property.mapper._get_committed_attr_by_column(o, col)) + dict_ = state.dict + return sql.bindparam( + x, unique=True, + callable_=self.property._get_attr_w_warn_on_none( + col, + self.property.mapper._get_state_attr_by_column, + state, dict_, col, passive=attributes.PASSIVE_OFF + ) + ) def adapt(col): if self.adapter: @@ -1211,13 +1268,14 @@ def adapt(col): adapt(x) == None) for (x, y) in self.property.local_remote_pairs]) - criterion = sql.and_(*[x == y for (x, y) in - zip( - self.property.mapper.primary_key, - self.property.\ - mapper.\ - primary_key_from_instance(other)) - ]) + criterion = sql.and_(*[ + x == y for (x, y) in + zip( + self.property.mapper.primary_key, + self.property.mapper.primary_key_from_instance(other) + ) + ]) + return ~self._criterion_exists(criterion) def __ne__(self, other): @@ -1261,16 +1319,18 @@ def __ne__(self, other): """ if isinstance(other, (util.NoneType, expression.Null)): if self.property.direction == MANYTOONE: - return sql.or_(*[x != None for x in - self.property._calculated_foreign_keys]) + return _orm_annotate(~self.property._optimized_compare( + None, adapt_source=self.adapter)) + else: return self._criterion_exists() elif self.property.uselist: - raise sa_exc.InvalidRequestError("Can't compare a collection" - " to an object or collection; use " - "contains() to test for membership.") + raise sa_exc.InvalidRequestError( + "Can't compare a collection" + " to an object or collection; use " + "contains() to test for membership.") else: - return self.__negated_contains_or_equals(other) + return _orm_annotate(self.__negated_contains_or_equals(other)) @util.memoized_property def property(self): @@ -1278,51 +1338,106 @@ def property(self): mapperlib.Mapper._configure_all() return self.prop - def compare(self, op, value, - value_is_parent=False, - alias_secondary=True): - if op == operators.eq: + def _with_parent(self, instance, alias_secondary=True): + assert instance is not None + return self._optimized_compare( + instance, value_is_parent=True, alias_secondary=alias_secondary) + + def _optimized_compare(self, state, value_is_parent=False, + adapt_source=None, + alias_secondary=True): + if state is not None: + state = attributes.instance_state(state) + + reverse_direction = not value_is_parent + + if state is None: + return self._lazy_none_clause( + reverse_direction, + adapt_source=adapt_source) + + if not reverse_direction: + criterion, bind_to_col = \ + self._lazy_strategy._lazywhere, \ + self._lazy_strategy._bind_to_col + else: + criterion, bind_to_col = \ + self._lazy_strategy._rev_lazywhere, \ + self._lazy_strategy._rev_bind_to_col + + if reverse_direction: + mapper = self.mapper + else: + mapper = self.parent + + dict_ = attributes.instance_dict(state.obj()) + + def visit_bindparam(bindparam): + if bindparam._identifying_key in bind_to_col: + bindparam.callable = self._get_attr_w_warn_on_none( + bind_to_col[bindparam._identifying_key], + mapper._get_state_attr_by_column, + state, dict_, + bind_to_col[bindparam._identifying_key], + passive=attributes.PASSIVE_OFF) + + if self.secondary is not None and alias_secondary: + criterion = ClauseAdapter( + 
self.secondary.alias()).\ + traverse(criterion) + + criterion = visitors.cloned_traverse( + criterion, {}, {'bindparam': visit_bindparam}) + + if adapt_source: + criterion = adapt_source(criterion) + return criterion + + def _get_attr_w_warn_on_none(self, column, fn, *arg, **kw): + def _go(): + value = fn(*arg, **kw) if value is None: - if self.uselist: - return ~sql.exists([1], self.primaryjoin) - else: - return self._optimized_compare(None, - value_is_parent=value_is_parent, - alias_secondary=alias_secondary) - else: - return self._optimized_compare(value, - value_is_parent=value_is_parent, - alias_secondary=alias_secondary) + util.warn( + "Got None for value of column %s; this is unsupported " + "for a relationship comparison and will not " + "currently produce an IS comparison " + "(but may in a future release)" % column) + return value + return _go + + def _lazy_none_clause(self, reverse_direction=False, adapt_source=None): + if not reverse_direction: + criterion, bind_to_col = \ + self._lazy_strategy._lazywhere, \ + self._lazy_strategy._bind_to_col else: - return op(self.comparator, value) - - def _optimized_compare(self, value, value_is_parent=False, - adapt_source=None, - alias_secondary=True): - if value is not None: - value = attributes.instance_state(value) - return self._lazy_strategy.lazy_clause(value, - reverse_direction=not value_is_parent, - alias_secondary=alias_secondary, - adapt_source=adapt_source) + criterion, bind_to_col = \ + self._lazy_strategy._rev_lazywhere, \ + self._lazy_strategy._rev_bind_to_col + + criterion = adapt_criterion_to_null(criterion, bind_to_col) + + if adapt_source: + criterion = adapt_source(criterion) + return criterion def __str__(self): return str(self.parent.class_.__name__) + "." + self.key def merge(self, - session, - source_state, - source_dict, - dest_state, - dest_dict, - load, _recursive): + session, + source_state, + source_dict, + dest_state, + dest_dict, + load, _recursive): if load: for r in self._reverse_property: if (source_state, r) in _recursive: return - if not "merge" in self._cascade: + if "merge" not in self._cascade: return if self.key not in source_dict: @@ -1330,7 +1445,7 @@ def merge(self, if self.uselist: instances = source_state.get_impl(self.key).\ - get(source_state, source_dict) + get(source_state, source_dict) if hasattr(instances, '_sa_adapter'): # convert collections to adapters to get a true iterator instances = instances._sa_adapter @@ -1349,18 +1464,18 @@ def merge(self, current_dict = attributes.instance_dict(current) _recursive[(current_state, self)] = True obj = session._merge(current_state, current_dict, - load=load, _recursive=_recursive) + load=load, _recursive=_recursive) if obj is not None: dest_list.append(obj) if not load: coll = attributes.init_state_collection(dest_state, - dest_dict, self.key) + dest_dict, self.key) for c in dest_list: coll.append_without_event(c) else: - dest_state.get_impl(self.key)._set_iterable(dest_state, - dest_dict, dest_list) + dest_state.get_impl(self.key)._set_iterable( + dest_state, dest_dict, dest_list) else: current = source_dict[self.key] if current is not None: @@ -1368,7 +1483,7 @@ def merge(self, current_dict = attributes.instance_dict(current) _recursive[(current_state, self)] = True obj = session._merge(current_state, current_dict, - load=load, _recursive=_recursive) + load=load, _recursive=_recursive) else: obj = None @@ -1376,10 +1491,10 @@ def merge(self, dest_dict[self.key] = obj else: dest_state.get_impl(self.key).set(dest_state, - dest_dict, obj, None) + 
dest_dict, obj, None) def _value_as_iterable(self, state, dict_, key, - passive=attributes.PASSIVE_OFF): + passive=attributes.PASSIVE_OFF): """Return a list of tuples (state, obj) for the given key. @@ -1400,7 +1515,7 @@ def _value_as_iterable(self, state, dict_, key, def cascade_iterator(self, type_, state, dict_, visited_states, halt_on=None): - #assert type_ in self._cascade + # assert type_ in self._cascade # only actively lazy load on the 'delete' cascade if type_ != 'delete' or self.passive_deletes: @@ -1410,11 +1525,11 @@ def cascade_iterator(self, type_, state, dict_, if type_ == 'save-update': tuples = state.manager[self.key].impl.\ - get_all_pending(state, dict_) + get_all_pending(state, dict_) else: tuples = self._value_as_iterable(state, dict_, self.key, - passive=passive) + passive=passive) skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \ not in self._cascade @@ -1442,12 +1557,12 @@ def cascade_iterator(self, type_, state, dict_, if not instance_mapper.isa(self.mapper.class_manager.mapper): raise AssertionError("Attribute '%s' on class '%s' " - "doesn't handle objects " - "of type '%s'" % ( - self.key, - self.parent.class_, - c.__class__ - )) + "doesn't handle objects " + "of type '%s'" % ( + self.key, + self.parent.class_, + c.__class__ + )) visited_states.add(instance_state) @@ -1459,16 +1574,19 @@ def _add_reverse_property(self, key): other._reverse_property.add(self) if not other.mapper.common_parent(self.parent): - raise sa_exc.ArgumentError('reverse_property %r on ' - 'relationship %s references relationship %s, which ' - 'does not reference mapper %s' % (key, self, other, - self.parent)) + raise sa_exc.ArgumentError( + 'reverse_property %r on ' + 'relationship %s references relationship %s, which ' + 'does not reference mapper %s' % + (key, self, other, self.parent)) + if self.direction in (ONETOMANY, MANYTOONE) and self.direction \ - == other.direction: - raise sa_exc.ArgumentError('%s and back-reference %s are ' - 'both of the same direction %r. Did you mean to ' - 'set remote_side on the many-to-one side ?' - % (other, self, self.direction)) + == other.direction: + raise sa_exc.ArgumentError( + '%s and back-reference %s are ' + 'both of the same direction %r. Did you mean to ' + 'set remote_side on the many-to-one side ?' % + (other, self, self.direction)) @util.memoized_property def mapper(self): @@ -1479,20 +1597,21 @@ def mapper(self): """ if util.callable(self.argument) and \ - not isinstance(self.argument, (type, mapperlib.Mapper)): + not isinstance(self.argument, (type, mapperlib.Mapper)): argument = self.argument() else: argument = self.argument if isinstance(argument, type): mapper_ = mapperlib.class_mapper(argument, - configure=False) + configure=False) elif isinstance(self.argument, mapperlib.Mapper): mapper_ = argument else: - raise sa_exc.ArgumentError("relationship '%s' expects " - "a class or a mapper argument (received: %s)" - % (self.key, type(argument))) + raise sa_exc.ArgumentError( + "relationship '%s' expects " + "a class or a mapper argument (received: %s)" + % (self.key, type(argument))) return mapper_ @util.memoized_property @@ -1511,10 +1630,10 @@ def do_init(self): self._check_cascade_settings(self._cascade) self._post_init() self._generate_backref() + self._join_condition._warn_for_conflicting_sync_targets() super(RelationshipProperty, self).do_init() self._lazy_strategy = self._get_strategy((("lazy", "select"),)) - def _process_dependent_arguments(self): """Convert incoming configuration arguments to their proper form. 
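
The hunks above route relationship-to-instance comparisons through the new
_optimized_compare() / _get_attr_w_warn_on_none() path, so comparing a
relationship attribute against an object whose referenced column value is
still None now emits a warning when the bound parameter is resolved, rather
than silently binding a NULL value. A minimal sketch of that behaviour
follows; the User/Address declarative models are throwaway assumptions for
illustration only and are not part of this patch.

# Minimal sketch (not part of the patch): User/Address are assumed
# throwaway models used only to exercise the comparison path shown above.
from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, relationship

Base = declarative_base()


class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)


class Address(Base):
    __tablename__ = 'address'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('user.id'))
    user = relationship(User)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(engine)

pending = User()  # primary key is still None

# The comparison builds its criterion via _optimized_compare(); because
# pending.id is still None when the bound parameter callable runs, executing
# the query emits the "Got None for value of column ..." warning instead of
# silently comparing against NULL.
session.query(Address).filter(Address.user == pending).all()

Note that the warning fires when the statement's parameters are resolved at
execution time, which is why the sketch executes the query rather than only
printing it.
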
@@ -1528,7 +1647,7 @@ def _process_dependent_arguments(self): for attr in ( 'order_by', 'primaryjoin', 'secondaryjoin', 'secondary', '_user_defined_foreign_keys', 'remote_side', - ): + ): attr_value = getattr(self, attr) if util.callable(attr_value): setattr(self, attr, attr_value()) @@ -1546,44 +1665,43 @@ def _process_dependent_arguments(self): # remote_side are all columns, not strings. if self.order_by is not False and self.order_by is not None: self.order_by = [ - expression._only_column_elements(x, "order_by") - for x in - util.to_list(self.order_by)] + expression._only_column_elements(x, "order_by") + for x in + util.to_list(self.order_by)] self._user_defined_foreign_keys = \ util.column_set( - expression._only_column_elements(x, "foreign_keys") - for x in util.to_column_set( - self._user_defined_foreign_keys - )) + expression._only_column_elements(x, "foreign_keys") + for x in util.to_column_set( + self._user_defined_foreign_keys + )) self.remote_side = \ util.column_set( - expression._only_column_elements(x, "remote_side") - for x in - util.to_column_set(self.remote_side)) + expression._only_column_elements(x, "remote_side") + for x in + util.to_column_set(self.remote_side)) self.target = self.mapper.mapped_table - def _setup_join_conditions(self): self._join_condition = jc = JoinCondition( - parent_selectable=self.parent.mapped_table, - child_selectable=self.mapper.mapped_table, - parent_local_selectable=self.parent.local_table, - child_local_selectable=self.mapper.local_table, - primaryjoin=self.primaryjoin, - secondary=self.secondary, - secondaryjoin=self.secondaryjoin, - parent_equivalents=self.parent._equivalent_columns, - child_equivalents=self.mapper._equivalent_columns, - consider_as_foreign_keys=self._user_defined_foreign_keys, - local_remote_pairs=self.local_remote_pairs, - remote_side=self.remote_side, - self_referential=self._is_self_referential, - prop=self, - support_sync=not self.viewonly, - can_be_synced_fn=self._columns_are_mapped + parent_selectable=self.parent.mapped_table, + child_selectable=self.mapper.mapped_table, + parent_local_selectable=self.parent.local_table, + child_local_selectable=self.mapper.local_table, + primaryjoin=self.primaryjoin, + secondary=self.secondary, + secondaryjoin=self.secondaryjoin, + parent_equivalents=self.parent._equivalent_columns, + child_equivalents=self.mapper._equivalent_columns, + consider_as_foreign_keys=self._user_defined_foreign_keys, + local_remote_pairs=self.local_remote_pairs, + remote_side=self.remote_side, + self_referential=self._is_self_referential, + prop=self, + support_sync=not self.viewonly, + can_be_synced_fn=self._columns_are_mapped ) self.primaryjoin = jc.deannotated_primaryjoin self.secondaryjoin = jc.deannotated_secondaryjoin @@ -1599,17 +1717,17 @@ def _check_conflicts(self): """Test that this relationship is legal, warn about inheritance conflicts.""" - if not self.is_primary() \ - and not mapperlib.class_mapper( - self.parent.class_, - configure=False).has_property(self.key): - raise sa_exc.ArgumentError("Attempting to assign a new " - "relationship '%s' to a non-primary mapper on " - "class '%s'. New relationships can only be added " - "to the primary mapper, i.e. 
the very first mapper " - "created for class '%s' " % (self.key, - self.parent.class_.__name__, - self.parent.class_.__name__)) + if self.parent.non_primary and not mapperlib.class_mapper( + self.parent.class_, + configure=False).has_property(self.key): + raise sa_exc.ArgumentError( + "Attempting to assign a new " + "relationship '%s' to a non-primary mapper on " + "class '%s'. New relationships can only be added " + "to the primary mapper, i.e. the very first mapper " + "created for class '%s' " % + (self.key, self.parent.class_.__name__, + self.parent.class_.__name__)) # check for conflicting relationship() on superclass if not self.parent.concrete: @@ -1644,28 +1762,28 @@ def _check_cascade_settings(self, cascade): and (self.direction is MANYTOMANY or self.direction is MANYTOONE): raise sa_exc.ArgumentError( - 'On %s, delete-orphan cascade is not supported ' - 'on a many-to-many or many-to-one relationship ' - 'when single_parent is not set. Set ' - 'single_parent=True on the relationship().' - % self) + 'On %s, delete-orphan cascade is not supported ' + 'on a many-to-many or many-to-one relationship ' + 'when single_parent is not set. Set ' + 'single_parent=True on the relationship().' + % self) if self.direction is MANYTOONE and self.passive_deletes: util.warn("On %s, 'passive_deletes' is normally configured " "on one-to-many, one-to-one, many-to-many " "relationships only." - % self) + % self) if self.passive_deletes == 'all' and \ - ("delete" in cascade or - "delete-orphan" in cascade): + ("delete" in cascade or + "delete-orphan" in cascade): raise sa_exc.ArgumentError( - "On %s, can't set passive_deletes='all' in conjunction " - "with 'delete' or 'delete-orphan' cascade" % self) + "On %s, can't set passive_deletes='all' in conjunction " + "with 'delete' or 'delete-orphan' cascade" % self) if cascade.delete_orphan: self.mapper.primary_mapper()._delete_orphans.append( - (self.key, self.parent.class_) - ) + (self.key, self.parent.class_) + ) def _columns_are_mapped(self, *cols): """Return True if all columns in the given collection are @@ -1685,7 +1803,7 @@ def _generate_backref(self): """Interpret the 'backref' instruction to create a :func:`.relationship` complementary to this one.""" - if not self.is_primary(): + if self.parent.non_primary: return if self.backref is not None and not self.back_populates: if isinstance(self.backref, util.string_types): @@ -1695,13 +1813,14 @@ def _generate_backref(self): mapper = self.mapper.primary_mapper() check = set(mapper.iterate_to_root()).\ - union(mapper.self_and_descendants) + union(mapper.self_and_descendants) for m in check: if m.has_property(backref_key): - raise sa_exc.ArgumentError("Error creating backref " - "'%s' on relationship '%s': property of that " - "name exists on mapper '%s'" % (backref_key, - self, m)) + raise sa_exc.ArgumentError( + "Error creating backref " + "'%s' on relationship '%s': property of that " + "name exists on mapper '%s'" % + (backref_key, self, m)) # determine primaryjoin/secondaryjoin for the # backref. Use the one we had, so that @@ -1711,13 +1830,16 @@ def _generate_backref(self): # for many to many, just switch primaryjoin/ # secondaryjoin. use the annotated # pj/sj on the _join_condition. 
- pj = kwargs.pop('primaryjoin', - self._join_condition.secondaryjoin_minus_local) - sj = kwargs.pop('secondaryjoin', - self._join_condition.primaryjoin_minus_local) + pj = kwargs.pop( + 'primaryjoin', + self._join_condition.secondaryjoin_minus_local) + sj = kwargs.pop( + 'secondaryjoin', + self._join_condition.primaryjoin_minus_local) else: - pj = kwargs.pop('primaryjoin', - self._join_condition.primaryjoin_reverse_remote) + pj = kwargs.pop( + 'primaryjoin', + self._join_condition.primaryjoin_reverse_remote) sj = kwargs.pop('secondaryjoin', None) if sj: raise sa_exc.InvalidRequestError( @@ -1726,7 +1848,7 @@ def _generate_backref(self): ) foreign_keys = kwargs.pop('foreign_keys', - self._user_defined_foreign_keys) + self._user_defined_foreign_keys) parent = self.parent.primary_mapper() kwargs.setdefault('viewonly', self.viewonly) kwargs.setdefault('post_update', self.post_update) @@ -1763,8 +1885,8 @@ def _is_self_referential(self): return self.mapper.common_parent(self.parent) def _create_joins(self, source_polymorphic=False, - source_selectable=None, dest_polymorphic=False, - dest_selectable=None, of_type=None): + source_selectable=None, dest_polymorphic=False, + dest_selectable=None, of_type=None): if source_selectable is None: if source_polymorphic and self.parent.with_polymorphic: source_selectable = self.parent._with_polymorphic_selectable @@ -1797,7 +1919,8 @@ def _create_joins(self, source_polymorphic=False, if dest_selectable is None: dest_selectable = self.mapper.local_table return (primaryjoin, secondaryjoin, source_selectable, - dest_selectable, secondary, target_adapter) + dest_selectable, secondary, target_adapter) + def _annotate_columns(element, annotations): def clone(elem): @@ -1813,23 +1936,23 @@ def clone(elem): class JoinCondition(object): def __init__(self, - parent_selectable, - child_selectable, - parent_local_selectable, - child_local_selectable, - primaryjoin=None, - secondary=None, - secondaryjoin=None, - parent_equivalents=None, - child_equivalents=None, - consider_as_foreign_keys=None, - local_remote_pairs=None, - remote_side=None, - self_referential=False, - prop=None, - support_sync=True, - can_be_synced_fn=lambda *c: True - ): + parent_selectable, + child_selectable, + parent_local_selectable, + child_local_selectable, + primaryjoin=None, + secondary=None, + secondaryjoin=None, + parent_equivalents=None, + child_equivalents=None, + consider_as_foreign_keys=None, + local_remote_pairs=None, + remote_side=None, + self_referential=False, + prop=None, + support_sync=True, + can_be_synced_fn=lambda *c: True + ): self.parent_selectable = parent_selectable self.parent_local_selectable = parent_local_selectable self.child_selectable = child_selectable @@ -1863,26 +1986,26 @@ def _log_joins(self): return log = self.prop.logger log.info('%s setup primary join %s', self.prop, - self.primaryjoin) + self.primaryjoin) log.info('%s setup secondary join %s', self.prop, - self.secondaryjoin) + self.secondaryjoin) log.info('%s synchronize pairs [%s]', self.prop, - ','.join('(%s => %s)' % (l, r) for (l, r) in - self.synchronize_pairs)) + ','.join('(%s => %s)' % (l, r) for (l, r) in + self.synchronize_pairs)) log.info('%s secondary synchronize pairs [%s]', self.prop, - ','.join('(%s => %s)' % (l, r) for (l, r) in - self.secondary_synchronize_pairs or [])) + ','.join('(%s => %s)' % (l, r) for (l, r) in + self.secondary_synchronize_pairs or [])) log.info('%s local/remote pairs [%s]', self.prop, - ','.join('(%s / %s)' % (l, r) for (l, r) in - self.local_remote_pairs)) + 
','.join('(%s / %s)' % (l, r) for (l, r) in + self.local_remote_pairs)) log.info('%s remote columns [%s]', self.prop, - ','.join('%s' % col for col in self.remote_columns) - ) + ','.join('%s' % col for col in self.remote_columns) + ) log.info('%s local columns [%s]', self.prop, - ','.join('%s' % col for col in self.local_columns) - ) + ','.join('%s' % col for col in self.local_columns) + ) log.info('%s relationship direction %s', self.prop, - self.direction) + self.direction) def _determine_joins(self): """Determine the 'primaryjoin' and 'secondaryjoin' attributes, @@ -1894,9 +2017,9 @@ def _determine_joins(self): """ if self.secondaryjoin is not None and self.secondary is None: raise sa_exc.ArgumentError( - "Property %s specified with secondary " - "join condition but " - "no secondary argument" % self.prop) + "Property %s specified with secondary " + "join condition but " + "no secondary argument" % self.prop) # find a join between the given mapper's mapped table and # the given table. will try the mapper's local table first @@ -1933,47 +2056,47 @@ def _determine_joins(self): ) except sa_exc.NoForeignKeysError: if self.secondary is not None: - raise sa_exc.NoForeignKeysError("Could not determine join " - "condition between parent/child tables on " - "relationship %s - there are no foreign keys " - "linking these tables via secondary table '%s'. " - "Ensure that referencing columns are associated " - "with a ForeignKey or ForeignKeyConstraint, or " - "specify 'primaryjoin' and 'secondaryjoin' " - "expressions." - % (self.prop, self.secondary)) + raise sa_exc.NoForeignKeysError( + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are no foreign keys " + "linking these tables via secondary table '%s'. " + "Ensure that referencing columns are associated " + "with a ForeignKey or ForeignKeyConstraint, or " + "specify 'primaryjoin' and 'secondaryjoin' " + "expressions." % (self.prop, self.secondary)) else: - raise sa_exc.NoForeignKeysError("Could not determine join " - "condition between parent/child tables on " - "relationship %s - there are no foreign keys " - "linking these tables. " - "Ensure that referencing columns are associated " - "with a ForeignKey or ForeignKeyConstraint, or " - "specify a 'primaryjoin' expression." - % self.prop) + raise sa_exc.NoForeignKeysError( + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are no foreign keys " + "linking these tables. " + "Ensure that referencing columns are associated " + "with a ForeignKey or ForeignKeyConstraint, or " + "specify a 'primaryjoin' expression." % self.prop) except sa_exc.AmbiguousForeignKeysError: if self.secondary is not None: raise sa_exc.AmbiguousForeignKeysError( - "Could not determine join " - "condition between parent/child tables on " - "relationship %s - there are multiple foreign key " - "paths linking the tables via secondary table '%s'. " - "Specify the 'foreign_keys' " - "argument, providing a list of those columns which " - "should be counted as containing a foreign key " - "reference from the secondary table to each of the " - "parent and child tables." - % (self.prop, self.secondary)) + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are multiple foreign key " + "paths linking the tables via secondary table '%s'. 
" + "Specify the 'foreign_keys' " + "argument, providing a list of those columns which " + "should be counted as containing a foreign key " + "reference from the secondary table to each of the " + "parent and child tables." + % (self.prop, self.secondary)) else: raise sa_exc.AmbiguousForeignKeysError( - "Could not determine join " - "condition between parent/child tables on " - "relationship %s - there are multiple foreign key " - "paths linking the tables. Specify the " - "'foreign_keys' argument, providing a list of those " - "columns which should be counted as containing a " - "foreign key reference to the parent table." - % self.prop) + "Could not determine join " + "condition between parent/child tables on " + "relationship %s - there are multiple foreign key " + "paths linking the tables. Specify the " + "'foreign_keys' argument, providing a list of those " + "columns which should be counted as containing a " + "foreign key reference to the parent table." + % self.prop) @property def primaryjoin_minus_local(self): @@ -1981,7 +2104,8 @@ def primaryjoin_minus_local(self): @property def secondaryjoin_minus_local(self): - return _deep_deannotate(self.secondaryjoin, values=("local", "remote")) + return _deep_deannotate(self.secondaryjoin, + values=("local", "remote")) @util.memoized_property def primaryjoin_reverse_remote(self): @@ -2007,12 +2131,12 @@ def replace(element): v['remote'] = True return element._with_annotations(v) return visitors.replacement_traverse( - self.primaryjoin, {}, replace) + self.primaryjoin, {}, replace) else: if self._has_foreign_annotations: # TODO: coverage return _deep_deannotate(self.primaryjoin, - values=("local", "remote")) + values=("local", "remote")) else: return _deep_deannotate(self.primaryjoin) @@ -2069,7 +2193,7 @@ def _annotate_present_fks(self): def is_foreign(a, b): if isinstance(a, schema.Column) and \ - isinstance(b, schema.Column): + isinstance(b, schema.Column): if a.references(b): return a elif b.references(a): @@ -2083,7 +2207,7 @@ def is_foreign(a, b): def visit_binary(binary): if not isinstance(binary.left, sql.ColumnElement) or \ - not isinstance(binary.right, sql.ColumnElement): + not isinstance(binary.right, sql.ColumnElement): return if "foreign" not in binary.left._annotations and \ @@ -2092,10 +2216,10 @@ def visit_binary(binary): if col is not None: if col.compare(binary.left): binary.left = binary.left._annotate( - {"foreign": True}) + {"foreign": True}) elif col.compare(binary.right): binary.right = binary.right._annotate( - {"foreign": True}) + {"foreign": True}) self.primaryjoin = visitors.cloned_traverse( self.primaryjoin, @@ -2121,25 +2245,26 @@ def _refers_to_parent_table(self): def visit_binary(binary): c, f = binary.left, binary.right if ( - isinstance(c, expression.ColumnClause) and \ - isinstance(f, expression.ColumnClause) and \ - pt.is_derived_from(c.table) and \ - pt.is_derived_from(f.table) and \ - mt.is_derived_from(c.table) and \ + isinstance(c, expression.ColumnClause) and + isinstance(f, expression.ColumnClause) and + pt.is_derived_from(c.table) and + pt.is_derived_from(f.table) and + mt.is_derived_from(c.table) and mt.is_derived_from(f.table) ): result[0] = True visitors.traverse( - self.primaryjoin, - {}, - {"binary": visit_binary} - ) + self.primaryjoin, + {}, + {"binary": visit_binary} + ) return result[0] def _tables_overlap(self): """Return True if parent/child tables have some overlap.""" - return selectables_overlap(self.parent_selectable, self.child_selectable) + return selectables_overlap( + 
self.parent_selectable, self.child_selectable) def _annotate_remote(self): """Annotate the primaryjoin and secondaryjoin @@ -2155,7 +2280,7 @@ def _annotate_remote(self): elif self._local_remote_pairs or self._remote_side: self._annotate_remote_from_args() elif self._refers_to_parent_table(): - self._annotate_selfref(lambda col: "foreign" in col._annotations) + self._annotate_selfref(lambda col: "foreign" in col._annotations, False) elif self._tables_overlap(): self._annotate_remote_with_overlap() else: @@ -2170,11 +2295,11 @@ def repl(element): if self.secondary.c.contains_column(element): return element._annotate({"remote": True}) self.primaryjoin = visitors.replacement_traverse( - self.primaryjoin, {}, repl) + self.primaryjoin, {}, repl) self.secondaryjoin = visitors.replacement_traverse( - self.secondaryjoin, {}, repl) + self.secondaryjoin, {}, repl) - def _annotate_selfref(self, fn): + def _annotate_selfref(self, fn, remote_side_given): """annotate 'remote' in primaryjoin, secondaryjoin when the relationship is detected as self-referential. @@ -2188,13 +2313,13 @@ def visit_binary(binary): binary.left = binary.left._annotate({"remote": True}) if fn(binary.right) and not equated: binary.right = binary.right._annotate( - {"remote": True}) - else: + {"remote": True}) + elif not remote_side_given: self._warn_non_column_elements() self.primaryjoin = visitors.cloned_traverse( - self.primaryjoin, {}, - {"binary": visit_binary}) + self.primaryjoin, {}, + {"binary": visit_binary}) def _annotate_remote_from_args(self): """annotate 'remote' in primaryjoin, secondaryjoin @@ -2205,22 +2330,22 @@ def _annotate_remote_from_args(self): if self._local_remote_pairs: if self._remote_side: raise sa_exc.ArgumentError( - "remote_side argument is redundant " - "against more detailed _local_remote_side " - "argument.") + "remote_side argument is redundant " + "against more detailed _local_remote_side " + "argument.") remote_side = [r for (l, r) in self._local_remote_pairs] else: remote_side = self._remote_side if self._refers_to_parent_table(): - self._annotate_selfref(lambda col: col in remote_side) + self._annotate_selfref(lambda col: col in remote_side, True) else: def repl(element): if element in remote_side: return element._annotate({"remote": True}) self.primaryjoin = visitors.replacement_traverse( - self.primaryjoin, {}, repl) + self.primaryjoin, {}, repl) def _annotate_remote_with_overlap(self): """annotate 'remote' in primaryjoin, secondaryjoin @@ -2231,9 +2356,12 @@ def _annotate_remote_with_overlap(self): """ def visit_binary(binary): binary.left, binary.right = proc_left_right(binary.left, - binary.right) + binary.right) binary.right, binary.left = proc_left_right(binary.right, - binary.left) + binary.left) + + check_entities = self.prop is not None and \ + self.prop.mapper is not self.prop.parent def proc_left_right(left, right): if isinstance(left, expression.ColumnClause) and \ @@ -2241,14 +2369,20 @@ def proc_left_right(left, right): if self.child_selectable.c.contains_column(right) and \ self.parent_selectable.c.contains_column(left): right = right._annotate({"remote": True}) + elif check_entities and \ + right._annotations.get('parentmapper') is self.prop.mapper: + right = right._annotate({"remote": True}) + elif check_entities and \ + left._annotations.get('parentmapper') is self.prop.mapper: + left = left._annotate({"remote": True}) else: self._warn_non_column_elements() return left, right self.primaryjoin = visitors.cloned_traverse( - self.primaryjoin, {}, - {"binary": visit_binary}) + 
self.primaryjoin, {}, + {"binary": visit_binary}) def _annotate_remote_distinct_selectables(self): """annotate 'remote' in primaryjoin, secondaryjoin @@ -2258,14 +2392,13 @@ def _annotate_remote_distinct_selectables(self): """ def repl(element): if self.child_selectable.c.contains_column(element) and \ - ( - not self.parent_local_selectable.c.\ - contains_column(element) - or self.child_local_selectable.c.\ - contains_column(element)): + (not self.parent_local_selectable.c. + contains_column(element) or + self.child_local_selectable.c. + contains_column(element)): return element._annotate({"remote": True}) self.primaryjoin = visitors.replacement_traverse( - self.primaryjoin, {}, repl) + self.primaryjoin, {}, repl) def _warn_non_column_elements(self): util.warn( @@ -2291,7 +2424,7 @@ def _annotate_local(self): if self._local_remote_pairs: local_side = util.column_set([l for (l, r) - in self._local_remote_pairs]) + in self._local_remote_pairs]) else: local_side = util.column_set(self.parent_selectable.c) @@ -2300,20 +2433,20 @@ def locals_(elem): elem in local_side: return elem._annotate({"local": True}) self.primaryjoin = visitors.replacement_traverse( - self.primaryjoin, {}, locals_ - ) + self.primaryjoin, {}, locals_ + ) def _check_remote_side(self): if not self.local_remote_pairs: - raise sa_exc.ArgumentError('Relationship %s could ' - 'not determine any unambiguous local/remote column ' - 'pairs based on join condition and remote_side ' - 'arguments. ' - 'Consider using the remote() annotation to ' - 'accurately mark those elements of the join ' - 'condition that are on the remote side of ' - 'the relationship.' - % (self.prop, )) + raise sa_exc.ArgumentError( + 'Relationship %s could ' + 'not determine any unambiguous local/remote column ' + 'pairs based on join condition and remote_side ' + 'arguments. ' + 'Consider using the remote() annotation to ' + 'accurately mark those elements of the join ' + 'condition that are on the remote side of ' + 'the relationship.' % (self.prop, )) def _check_foreign_cols(self, join_condition, primary): """Check the foreign key columns collected and emit error @@ -2322,7 +2455,7 @@ def _check_foreign_cols(self, join_condition, primary): can_sync = False foreign_cols = self._gather_columns_with_annotation( - join_condition, "foreign") + join_condition, "foreign") has_foreign = bool(foreign_cols) @@ -2340,13 +2473,13 @@ def _check_foreign_cols(self, join_condition, primary): # (not just ==), perhaps they need to turn on "viewonly=True". if self.support_sync and has_foreign and not can_sync: err = "Could not locate any simple equality expressions "\ - "involving locally mapped foreign key columns for "\ - "%s join condition "\ - "'%s' on relationship %s." % ( - primary and 'primary' or 'secondary', - join_condition, - self.prop - ) + "involving locally mapped foreign key columns for "\ + "%s join condition "\ + "'%s' on relationship %s." % ( + primary and 'primary' or 'secondary', + join_condition, + self.prop + ) err += \ " Ensure that referencing columns are associated "\ "with a ForeignKey or ForeignKeyConstraint, or are "\ @@ -2357,11 +2490,11 @@ def _check_foreign_cols(self, join_condition, primary): raise sa_exc.ArgumentError(err) else: err = "Could not locate any relevant foreign key columns "\ - "for %s join condition '%s' on relationship %s." % ( - primary and 'primary' or 'secondary', - join_condition, - self.prop - ) + "for %s join condition '%s' on relationship %s." 
% ( + primary and 'primary' or 'secondary', + join_condition, + self.prop + ) err += \ ' Ensure that referencing columns are associated '\ 'with a ForeignKey or ForeignKeyConstraint, or are '\ @@ -2382,25 +2515,47 @@ def _determine_direction(self): # fk collection which suggests ONETOMANY. onetomany_fk = targetcols.intersection( - self.foreign_key_columns) + self.foreign_key_columns) # fk collection which suggests MANYTOONE. manytoone_fk = parentcols.intersection( - self.foreign_key_columns) + self.foreign_key_columns) if onetomany_fk and manytoone_fk: # fks on both sides. test for overlap of local/remote - # with foreign key - self_equated = self.remote_columns.intersection( - self.local_columns - ) - onetomany_local = self.remote_columns.\ - intersection(self.foreign_key_columns).\ - difference(self_equated) - manytoone_local = self.local_columns.\ - intersection(self.foreign_key_columns).\ - difference(self_equated) + # with foreign key. + # we will gather columns directly from their annotations + # without deannotating, so that we can distinguish on a column + # that refers to itself. + + # 1. columns that are both remote and FK suggest + # onetomany. + onetomany_local = self._gather_columns_with_annotation( + self.primaryjoin, "remote", "foreign") + + # 2. columns that are FK but are not remote (e.g. local) + # suggest manytoone. + manytoone_local = set([c for c in + self._gather_columns_with_annotation( + self.primaryjoin, + "foreign") + if "remote" not in c._annotations]) + + # 3. if both collections are present, remove columns that + # refer to themselves. This is for the case of + # and_(Me.id == Me.remote_id, Me.version == Me.version) + if onetomany_local and manytoone_local: + self_equated = self.remote_columns.intersection( + self.local_columns + ) + onetomany_local = onetomany_local.difference(self_equated) + manytoone_local = manytoone_local.difference(self_equated) + + # at this point, if only one or the other collection is + # present, we know the direction, otherwise it's still + # ambiguous. + if onetomany_local and not manytoone_local: self.direction = ONETOMANY elif manytoone_local and not onetomany_local: @@ -2420,10 +2575,11 @@ def _determine_direction(self): elif manytoone_fk: self.direction = MANYTOONE else: - raise sa_exc.ArgumentError("Can't determine relationship " - "direction for relationship '%s' - foreign " - "key columns are present in neither the parent " - "nor the child's mapped tables" % self.prop) + raise sa_exc.ArgumentError( + "Can't determine relationship " + "direction for relationship '%s' - foreign " + "key columns are present in neither the parent " + "nor the child's mapped tables" % self.prop) def _deannotate_pairs(self, collection): """provide deannotation for the various lists of @@ -2433,7 +2589,7 @@ def _deannotate_pairs(self, collection): """ return [(x._deannotate(), y._deannotate()) - for x, y in collection] + for x, y in collection] def _setup_pairs(self): sync_pairs = [] @@ -2471,6 +2627,60 @@ def visit_binary(binary, left, right): self.secondary_synchronize_pairs = \ self._deannotate_pairs(secondary_sync_pairs) + _track_overlapping_sync_targets = weakref.WeakKeyDictionary() + + def _warn_for_conflicting_sync_targets(self): + if not self.support_sync: + return + + # we would like to detect if we are synchronizing any column + # pairs in conflict with another relationship that wishes to sync + # an entirely different column to the same target. 
This is a + # very rare edge case so we will try to minimize the memory/overhead + # impact of this check + for from_, to_ in [ + (from_, to_) for (from_, to_) in self.synchronize_pairs + ] + [ + (from_, to_) for (from_, to_) in self.secondary_synchronize_pairs + ]: + # save ourselves a ton of memory and overhead by only + # considering columns that are subject to a overlapping + # FK constraints at the core level. This condition can arise + # if multiple relationships overlap foreign() directly, but + # we're going to assume it's typically a ForeignKeyConstraint- + # level configuration that benefits from this warning. + if len(to_.foreign_keys) < 2: + continue + + if to_ not in self._track_overlapping_sync_targets: + self._track_overlapping_sync_targets[to_] = \ + weakref.WeakKeyDictionary({self.prop: from_}) + else: + other_props = [] + prop_to_from = self._track_overlapping_sync_targets[to_] + for pr, fr_ in prop_to_from.items(): + if pr.mapper in mapperlib._mapper_registry and \ + fr_ is not from_ and \ + pr not in self.prop._reverse_property: + other_props.append((pr, fr_)) + + if other_props: + util.warn( + "relationship '%s' will copy column %s to column %s, " + "which conflicts with relationship(s): %s. " + "Consider applying " + "viewonly=True to read-only relationships, or provide " + "a primaryjoin condition marking writable columns " + "with the foreign() annotation." % ( + self.prop, + from_, to_, + ", ".join( + "'%s' (copies %s to %s)" % (pr, fr_, to_) + for (pr, fr_) in other_props) + ) + ) + self._track_overlapping_sync_targets[to_][self.prop] = from_ + @util.memoized_property def remote_columns(self): return self._gather_join_annotations("remote") @@ -2497,12 +2707,12 @@ def deannotated_secondaryjoin(self): def _gather_join_annotations(self, annotation): s = set( self._gather_columns_with_annotation( - self.primaryjoin, annotation) + self.primaryjoin, annotation) ) if self.secondaryjoin is not None: s.update( self._gather_columns_with_annotation( - self.secondaryjoin, annotation) + self.secondaryjoin, annotation) ) return set([x._deannotate() for x in s]) @@ -2514,9 +2724,9 @@ def _gather_columns_with_annotation(self, clause, *annotation): ]) def join_targets(self, source_selectable, - dest_selectable, - aliased, - single_crit=None): + dest_selectable, + aliased, + single_crit=None): """Given a source and destination selectable, create a join between them. @@ -2532,8 +2742,8 @@ def join_targets(self, source_selectable, # its internal structure remains fixed # regardless of context. 
dest_selectable = _shallow_annotate( - dest_selectable, - {'no_replacement_traverse': True}) + dest_selectable, + {'no_replacement_traverse': True}) primaryjoin, secondaryjoin, secondary = self.primaryjoin, \ self.secondaryjoin, self.secondary @@ -2555,24 +2765,26 @@ def join_targets(self, source_selectable, primary_aliasizer = ClauseAdapter(secondary) secondary_aliasizer = \ ClauseAdapter(dest_selectable, - equivalents=self.child_equivalents).\ - chain(primary_aliasizer) + equivalents=self.child_equivalents).\ + chain(primary_aliasizer) if source_selectable is not None: primary_aliasizer = \ ClauseAdapter(secondary).\ - chain(ClauseAdapter(source_selectable, + chain(ClauseAdapter( + source_selectable, equivalents=self.parent_equivalents)) secondaryjoin = \ secondary_aliasizer.traverse(secondaryjoin) else: - primary_aliasizer = ClauseAdapter(dest_selectable, - exclude_fn=_ColInAnnotations("local"), - equivalents=self.child_equivalents) + primary_aliasizer = ClauseAdapter( + dest_selectable, + exclude_fn=_ColInAnnotations("local"), + equivalents=self.child_equivalents) if source_selectable is not None: primary_aliasizer.chain( ClauseAdapter(source_selectable, - exclude_fn=_ColInAnnotations("remote"), - equivalents=self.parent_equivalents)) + exclude_fn=_ColInAnnotations("remote"), + equivalents=self.parent_equivalents)) secondary_aliasizer = None primaryjoin = primary_aliasizer.traverse(primaryjoin) @@ -2581,64 +2793,67 @@ def join_targets(self, source_selectable, else: target_adapter = None return primaryjoin, secondaryjoin, secondary, \ - target_adapter, dest_selectable + target_adapter, dest_selectable def create_lazy_clause(self, reverse_direction=False): binds = util.column_dict() - lookup = util.column_dict() equated_columns = util.column_dict() - being_replaced = set() - if reverse_direction and self.secondaryjoin is None: + has_secondary = self.secondaryjoin is not None + + if has_secondary: + lookup = collections.defaultdict(list) for l, r in self.local_remote_pairs: - _list = lookup.setdefault(r, []) - _list.append((r, l)) - equated_columns[l] = r - else: - # replace all "local side" columns, which is - # anything that isn't marked "remote" - being_replaced.update(self.local_columns) + lookup[l].append((l, r)) + equated_columns[r] = l + elif not reverse_direction: for l, r in self.local_remote_pairs: - _list = lookup.setdefault(l, []) - _list.append((l, r)) equated_columns[r] = l + else: + for l, r in self.local_remote_pairs: + equated_columns[l] = r def col_to_bind(col): - if col in being_replaced or col in lookup: - if col in lookup: - for tobind, equated in lookup[col]: - if equated in binds: - return None - else: - assert not reverse_direction + + if ( + (not reverse_direction and 'local' in col._annotations) or + reverse_direction and ( + (has_secondary and col in lookup) or + (not has_secondary and 'remote' in col._annotations) + ) + ): if col not in binds: binds[col] = sql.bindparam( None, None, type_=col.type, unique=True) return binds[col] return None - lazywhere = self.deannotated_primaryjoin - - if self.deannotated_secondaryjoin is None or not reverse_direction: + lazywhere = self.primaryjoin + if self.secondaryjoin is None or not reverse_direction: lazywhere = visitors.replacement_traverse( - lazywhere, {}, col_to_bind) + lazywhere, {}, col_to_bind) - if self.deannotated_secondaryjoin is not None: - secondaryjoin = self.deannotated_secondaryjoin + if self.secondaryjoin is not None: + secondaryjoin = self.secondaryjoin if reverse_direction: secondaryjoin = 
visitors.replacement_traverse( - secondaryjoin, {}, col_to_bind) + secondaryjoin, {}, col_to_bind) lazywhere = sql.and_(lazywhere, secondaryjoin) bind_to_col = dict((binds[col].key, col) for col in binds) + # this is probably not necessary + lazywhere = _deep_deannotate(lazywhere) + return lazywhere, bind_to_col, equated_columns + class _ColInAnnotations(object): """Seralizable equivalent to: lambda c: "name" in c._annotations """ + def __init__(self, name): self.name = name diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py index c1f8f319f7..6306514cb7 100644 --- a/lib/sqlalchemy/orm/scoping.py +++ b/lib/sqlalchemy/orm/scoping.py @@ -1,5 +1,6 @@ # orm/scoping.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -20,6 +21,12 @@ class scoped_session(object): """ + session_factory = None + """The `session_factory` provided to `__init__` is stored in this + attribute and may be accessed at a later time. This can be useful when + a new non-scoped :class:`.Session` or :class:`.Connection` to the + database is needed.""" + def __init__(self, session_factory, scopefunc=None): """Construct a new :class:`.scoped_session`. @@ -37,6 +44,7 @@ def __init__(self, session_factory, scopefunc=None): """ self.session_factory = session_factory + if scopefunc: self.registry = ScopedRegistry(session_factory, scopefunc) else: @@ -44,12 +52,12 @@ def __init__(self, session_factory, scopefunc=None): def __call__(self, **kw): """Return the current :class:`.Session`, creating it - using the session factory if not present. + using the :attr:`.scoped_session.session_factory` if not present. :param \**kw: Keyword arguments will be passed to the - session factory callable, if an existing :class:`.Session` - is not present. If the :class:`.Session` is present and - keyword arguments have been passed, + :attr:`.scoped_session.session_factory` callable, if an existing + :class:`.Session` is not present. If the :class:`.Session` is present + and keyword arguments have been passed, :exc:`~sqlalchemy.exc.InvalidRequestError` is raised. """ @@ -58,8 +66,8 @@ def __call__(self, **kw): if scope is not None: if self.registry.has(): raise sa_exc.InvalidRequestError( - "Scoped session is already present; " - "no new arguments may be specified.") + "Scoped session is already present; " + "no new arguments may be specified.") else: sess = self.session_factory(**kw) self.registry.set(sess) @@ -96,8 +104,8 @@ def configure(self, **kwargs): if self.registry.has(): warn('At least one scoped session is already present. ' - ' configure() can not affect sessions that have ' - 'already been created.') + ' configure() can not affect sessions that have ' + 'already been created.') self.session_factory.configure(**kwargs) diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index 5bd46691e9..000441fb95 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -1,35 +1,39 @@ # orm/session.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Provides the Session class and related utilities.""" - import weakref from .. 
import util, sql, engine, exc as sa_exc from ..sql import util as sql_util, expression from . import ( SessionExtension, attributes, exc, query, loading, identity - ) +) from ..inspection import inspect from .base import ( object_mapper, class_mapper, _class_to_mapper, _state_mapper, object_state, _none_set, state_str, instance_str - ) +) +import itertools +from . import persistence from .unitofwork import UOWTransaction from . import state as statelib import sys -__all__ = ['Session', 'SessionTransaction', 'SessionExtension', 'sessionmaker'] +__all__ = ['Session', 'SessionTransaction', + 'SessionExtension', 'sessionmaker'] _sessions = weakref.WeakValueDictionary() """Weak-referencing dictionary of :class:`.Session` objects. """ + def _state_session(state): """Given an :class:`.InstanceState`, return the :class:`.Session` associated, if any. @@ -42,7 +46,6 @@ def _state_session(state): return None - class _SessionClassMethods(object): """Class-level methods for :class:`.Session`, :class:`.sessionmaker`.""" @@ -80,6 +83,7 @@ def object_session(cls, instance): DEACTIVE = util.symbol('DEACTIVE') CLOSED = util.symbol('CLOSED') + class SessionTransaction(object): """A :class:`.Session`-level transaction. @@ -184,20 +188,20 @@ def is_active(self): return self.session is not None and self._state is ACTIVE def _assert_active(self, prepared_ok=False, - rollback_ok=False, - deactive_ok=False, - closed_msg="This transaction is closed"): + rollback_ok=False, + deactive_ok=False, + closed_msg="This transaction is closed"): if self._state is COMMITTED: raise sa_exc.InvalidRequestError( - "This session is in 'committed' state; no further " - "SQL can be emitted within this transaction." - ) + "This session is in 'committed' state; no further " + "SQL can be emitted within this transaction." + ) elif self._state is PREPARED: if not prepared_ok: raise sa_exc.InvalidRequestError( - "This session is in 'prepared' state; no further " - "SQL can be emitted within this transaction." - ) + "This session is in 'prepared' state; no further " + "SQL can be emitted within this transaction." + ) elif self._state is DEACTIVE: if not deactive_ok and not rollback_ok: if self._rollback_exception: @@ -214,7 +218,7 @@ def _assert_active(self, prepared_ok=False, "This Session's transaction has been rolled back " "by a nested rollback() call. To begin a new " "transaction, issue Session.rollback() first." 
- ) + ) elif self._state is CLOSED: raise sa_exc.ResourceClosedError(closed_msg) @@ -222,10 +226,10 @@ def _assert_active(self, prepared_ok=False, def _is_transaction_boundary(self): return self.nested or not self._parent - def connection(self, bindkey, **kwargs): + def connection(self, bindkey, execution_options=None, **kwargs): self._assert_active() bind = self.session.get_bind(bindkey, **kwargs) - return self._connection_for_bind(bind) + return self._connection_for_bind(bind, execution_options) def _begin(self, nested=False): self._assert_active() @@ -233,14 +237,21 @@ def _begin(self, nested=False): self.session, self, nested=nested) def _iterate_parents(self, upto=None): - if self._parent is upto: - return (self,) - else: - if self._parent is None: + + current = self + result = () + while current: + result += (current, ) + if current._parent is upto: + break + elif current._parent is None: raise sa_exc.InvalidRequestError( "Transaction %s is not on the active transaction list" % ( - upto)) - return (self,) + self._parent._iterate_parents(upto) + upto)) + else: + current = current._parent + + return result def _take_snapshot(self): if not self._is_transaction_boundary: @@ -267,13 +278,13 @@ def _restore_snapshot(self, dirty_only=False): del s.key for s, (oldkey, newkey) in self._key_switches.items(): - self.session.identity_map.discard(s) + self.session.identity_map.safe_discard(s) s.key = oldkey self.session.identity_map.replace(s) for s in set(self._deleted).union(self.session._deleted): if s.deleted: - #assert s in self._deleted + # assert s in self._deleted del s.deleted self.session._update_impl(s, discard_existing=True) @@ -289,19 +300,27 @@ def _remove_snapshot(self): if not self.nested and self.session.expire_on_commit: for s in self.session.identity_map.all_states(): s._expire(s.dict, self.session.identity_map._modified) - for s in self._deleted: - s.session_id = None + for s in list(self._deleted): + s._detach() self._deleted.clear() + elif self.nested: + self._parent._new.update(self._new) + self._parent._dirty.update(self._dirty) + self._parent._deleted.update(self._deleted) + self._parent._key_switches.update(self._key_switches) - - def _connection_for_bind(self, bind): + def _connection_for_bind(self, bind, execution_options): self._assert_active() if bind in self._connections: + if execution_options: + util.warn( + "Connection is already established for the " + "given bind; execution_options ignored") return self._connections[bind][0] if self._parent: - conn = self._parent._connection_for_bind(bind) + conn = self._parent._connection_for_bind(bind, execution_options) if not self.nested: return conn else: @@ -314,6 +333,9 @@ def _connection_for_bind(self, bind): else: conn = bind.contextual_connect() + if execution_options: + conn = conn.execution_options(**execution_options) + if self.session.twophase and self._parent is None: transaction = conn.begin_twophase() elif self.nested: @@ -322,7 +344,7 @@ def _connection_for_bind(self, bind): transaction = conn.begin() self._connections[conn] = self._connections[conn.engine] = \ - (conn, transaction, conn is not bind) + (conn, transaction, conn is not bind) self.session.dispatch.after_begin(self.session, self, conn) return conn @@ -350,9 +372,9 @@ def _prepare_impl(self): self.session.flush() else: raise exc.FlushError( - "Over 100 subsequent flushes have occurred within " - "session.commit() - is an after_flush() hook " - "creating new objects?") + "Over 100 subsequent flushes have occurred within " + "session.commit() - is an 
after_flush() hook " + "creating new objects?") if self._parent is None and self.session.twophase: try: @@ -390,26 +412,29 @@ def rollback(self, _capture_exception=False): for subtransaction in stx._iterate_parents(upto=self): subtransaction.close() + boundary = self if self._state in (ACTIVE, PREPARED): for transaction in self._iterate_parents(): if transaction._parent is None or transaction.nested: transaction._rollback_impl() transaction._state = DEACTIVE + boundary = transaction break else: transaction._state = DEACTIVE sess = self.session - if self.session._enable_transaction_accounting and \ + if sess._enable_transaction_accounting and \ not sess._is_clean(): + # if items were added, deleted, or mutated # here, we need to re-restore the snapshot util.warn( - "Session's state has been changed on " - "a non-active transaction - this state " - "will be discarded.") - self._restore_snapshot(dirty_only=self.nested) + "Session's state has been changed on " + "a non-active transaction - this state " + "will be discarded.") + boundary._restore_snapshot(dirty_only=boundary.nested) self.close() if self._parent and _capture_exception: @@ -428,11 +453,13 @@ def _rollback_impl(self): self.session.dispatch.after_rollback(self.session) - def close(self): + def close(self, invalidate=False): self.session.transaction = self._parent if self._parent is None: for connection, transaction, autoclose in \ set(self._connections.values()): + if invalidate: + connection.invalidate() if autoclose: connection.close() else: @@ -477,12 +504,13 @@ class Session(_SessionClassMethods): '__contains__', '__iter__', 'add', 'add_all', 'begin', 'begin_nested', 'close', 'commit', 'connection', 'delete', 'execute', 'expire', 'expire_all', 'expunge', 'expunge_all', 'flush', 'get_bind', - 'is_modified', + 'is_modified', 'bulk_save_objects', 'bulk_insert_mappings', + 'bulk_update_mappings', 'merge', 'query', 'refresh', 'rollback', 'scalar') def __init__(self, bind=None, autoflush=True, expire_on_commit=True, - _enable_transaction_accounting=True, + _enable_transaction_accounting=True, autocommit=False, twophase=False, weak_identity_map=True, binds=None, extension=None, info=None, @@ -497,16 +525,16 @@ def __init__(self, bind=None, autoflush=True, expire_on_commit=True, .. warning:: - The autocommit flag is **not for general use**, and if it is used, - queries should only be invoked within the span of a - :meth:`.Session.begin` / :meth:`.Session.commit` pair. Executing + The autocommit flag is **not for general use**, and if it is + used, queries should only be invoked within the span of a + :meth:`.Session.begin` / :meth:`.Session.commit` pair. Executing queries outside of a demarcated transaction is a legacy mode of usage, and can in some cases lead to concurrent connection checkouts. Defaults to ``False``. When ``True``, the - :class:`.Session` does not keep a persistent transaction running, and - will acquire connections from the engine on an as-needed basis, + :class:`.Session` does not keep a persistent transaction running, + and will acquire connections from the engine on an as-needed basis, returning them immediately after their use. Flushes will begin and commit (or possibly rollback) their own transaction if no transaction is present. When using this mode, the @@ -518,27 +546,29 @@ def __init__(self, bind=None, autoflush=True, expire_on_commit=True, :ref:`session_autocommit` :param autoflush: When ``True``, all query operations will issue a - ``flush()`` call to this ``Session`` before proceeding. 
This is a - convenience feature so that ``flush()`` need not be called - repeatedly in order for database queries to retrieve results. It's - typical that ``autoflush`` is used in conjunction with - ``autocommit=False``. In this scenario, explicit calls to - ``flush()`` are rarely needed; you usually only need to call - ``commit()`` (which flushes) to finalize changes. - - :param bind: An optional ``Engine`` or ``Connection`` to which this - ``Session`` should be bound. When specified, all SQL operations - performed by this session will execute via this connectable. + :meth:`~.Session.flush` call to this ``Session`` before proceeding. + This is a convenience feature so that :meth:`~.Session.flush` need + not be called repeatedly in order for database queries to retrieve + results. It's typical that ``autoflush`` is used in conjunction + with ``autocommit=False``. In this scenario, explicit calls to + :meth:`~.Session.flush` are rarely needed; you usually only need to + call :meth:`~.Session.commit` (which flushes) to finalize changes. + + :param bind: An optional :class:`.Engine` or :class:`.Connection` to + which this ``Session`` should be bound. When specified, all SQL + operations performed by this session will execute via this + connectable. :param binds: An optional dictionary which contains more granular "bind" information than the ``bind`` parameter provides. This - dictionary can map individual ``Table`` instances as well as - ``Mapper`` instances to individual ``Engine`` or ``Connection`` - objects. Operations which proceed relative to a particular - ``Mapper`` will consult this dictionary for the direct ``Mapper`` - instance as well as the mapper's ``mapped_table`` attribute in - order to locate an connectable to use. The full resolution is - described in the ``get_bind()`` method of ``Session``. + dictionary can map individual :class`.Table` + instances as well as :class:`~.Mapper` instances to individual + :class:`.Engine` or :class:`.Connection` objects. Operations which + proceed relative to a particular :class:`.Mapper` will consult this + dictionary for the direct :class:`.Mapper` instance as + well as the mapper's ``mapped_table`` attribute in order to locate + a connectable to use. The full resolution is described in the + :meth:`.Session.get_bind`. Usage looks like:: Session = sessionmaker(binds={ @@ -553,20 +583,20 @@ def __init__(self, bind=None, autoflush=True, expire_on_commit=True, :param \class_: Specify an alternate class other than ``sqlalchemy.orm.session.Session`` which should be used by the returned class. This is the only argument that is local to the - ``sessionmaker()`` function, and is not sent directly to the + :class:`.sessionmaker` function, and is not sent directly to the constructor for ``Session``. :param _enable_transaction_accounting: Defaults to ``True``. A legacy-only flag which when ``False`` disables *all* 0.5-style object accounting on transaction boundaries, including auto-expiry of instances on rollback and commit, maintenance of the "new" and - "deleted" lists upon rollback, and autoflush of pending changes upon - begin(), all of which are interdependent. + "deleted" lists upon rollback, and autoflush of pending changes + upon :meth:`~.Session.begin`, all of which are interdependent. :param expire_on_commit: Defaults to ``True``. When ``True``, all - instances will be fully expired after each ``commit()``, so that - all attribute/object access subsequent to a completed transaction - will load from the most recent database state. 
+ instances will be fully expired after each :meth:`~.commit`, + so that all attribute/object access subsequent to a completed + transaction will load from the most recent database state. :param extension: An optional :class:`~.SessionExtension` instance, or a list @@ -575,38 +605,51 @@ def __init__(self, bind=None, autoflush=True, expire_on_commit=True, Please see :class:`.SessionEvents`. :param info: optional dictionary of arbitrary data to be associated - with this :class:`.Session`. Is available via the :attr:`.Session.info` - attribute. Note the dictionary is copied at construction time so - that modifications to the per-:class:`.Session` dictionary will be local - to that :class:`.Session`. + with this :class:`.Session`. Is available via the + :attr:`.Session.info` attribute. Note the dictionary is copied at + construction time so that modifications to the per- + :class:`.Session` dictionary will be local to that + :class:`.Session`. .. versionadded:: 0.9.0 :param query_cls: Class which should be used to create new Query - objects, as returned by the ``query()`` method. Defaults to - :class:`~sqlalchemy.orm.query.Query`. + objects, as returned by the :meth:`~.Session.query` method. + Defaults to :class:`.Query`. :param twophase: When ``True``, all transactions will be started as a "two phase" transaction, i.e. using the "two phase" semantics - of the database in use along with an XID. During a ``commit()``, - after ``flush()`` has been issued for all attached databases, the - ``prepare()`` method on each database's ``TwoPhaseTransaction`` - will be called. This allows each database to roll back the entire + of the database in use along with an XID. During a + :meth:`~.commit`, after :meth:`~.flush` has been issued for all + attached databases, the :meth:`~.TwoPhaseTransaction.prepare` + method on each database's :class:`.TwoPhaseTransaction` will be + called. This allows each database to roll back the entire transaction, before each transaction is committed. :param weak_identity_map: Defaults to ``True`` - when set to ``False``, objects placed in the :class:`.Session` will be strongly referenced until explicitly removed or the :class:`.Session` is closed. **Deprecated** - this option - is obsolete. + is present to allow compatibility with older applications, but + it is recommended that strong references to objects + be maintained by the calling application + externally to the :class:`.Session` itself, + to the extent that is required by the application. """ if weak_identity_map: self._identity_cls = identity.WeakInstanceDict else: - util.warn_deprecated("weak_identity_map=False is deprecated. " - "This feature is not needed.") + util.warn_deprecated( + "weak_identity_map=False is deprecated. 
" + "It is present to allow compatibility with older " + "applications, but " + "it is recommended that strong references to " + "objects be maintained by the calling application " + "externally to the :class:`.Session` itself, " + "to the extent that is required by the application.") + self._identity_cls = identity.StrongInstanceDict self.identity_map = self._identity_cls() @@ -632,15 +675,8 @@ def __init__(self, bind=None, autoflush=True, expire_on_commit=True, SessionExtension._adapt_listener(self, ext) if binds is not None: - for mapperortable, bind in binds.items(): - insp = inspect(mapperortable) - if insp.is_selectable: - self.bind_table(mapperortable, bind) - elif insp.is_mapper: - self.bind_mapper(mapperortable, bind) - else: - assert False - + for key, bind in binds.items(): + self._add_bind(key, bind) if not self.autocommit: self.begin() @@ -655,7 +691,7 @@ def __init__(self, bind=None, autoflush=True, expire_on_commit=True, def info(self): """A user-modifiable dictionary. - The initial value of this dictioanry can be populated using the + The initial value of this dictionary can be populated using the ``info`` argument to the :class:`.Session` constructor or :class:`.sessionmaker` constructor or factory methods. The dictionary here is always local to this :class:`.Session` and can be modified @@ -667,7 +703,7 @@ def info(self): return {} def begin(self, subtransactions=False, nested=False): - """Begin a transaction on this Session. + """Begin a transaction on this :class:`.Session`. If this Session is already within a transaction, either a plain transaction or nested transaction, an error is raised, unless @@ -686,7 +722,7 @@ def begin(self, subtransactions=False, nested=False): if self.transaction is not None: if subtransactions or nested: self.transaction = self.transaction._begin( - nested=nested) + nested=nested) else: raise sa_exc.InvalidRequestError( "A transaction is already begun. Use " @@ -716,7 +752,7 @@ def rollback(self): This method rolls back the current transaction or nested transaction regardless of subtransactions being in effect. All subtransactions up to the first real transaction are closed. Subtransactions occur when - begin() is called multiple times. + :meth:`.begin` is called multiple times. .. seealso:: @@ -784,9 +820,10 @@ def prepare(self): self.transaction.prepare() def connection(self, mapper=None, clause=None, - bind=None, - close_with_result=False, - **kw): + bind=None, + close_with_result=False, + execution_options=None, + **kw): """Return a :class:`.Connection` object corresponding to this :class:`.Session` object's transactional state. @@ -823,12 +860,24 @@ def connection(self, mapper=None, clause=None, etc.) which will be used to locate a bind, if a bind cannot otherwise be identified. - :param close_with_result: Passed to :meth:`.Engine.connect`, indicating - the :class:`.Connection` should be considered "single use", - automatically closing when the first result set is closed. This - flag only has an effect if this :class:`.Session` is configured with - ``autocommit=True`` and does not already have a transaction - in progress. + :param close_with_result: Passed to :meth:`.Engine.connect`, + indicating the :class:`.Connection` should be considered + "single use", automatically closing when the first result set is + closed. This flag only has an effect if this :class:`.Session` is + configured with ``autocommit=True`` and does not already have a + transaction in progress. 
+ + :param execution_options: a dictionary of execution options that will + be passed to :meth:`.Connection.execution_options`, **when the + connection is first procured only**. If the connection is already + present within the :class:`.Session`, a warning is emitted and + the arguments are ignored. + + .. versionadded:: 0.9.9 + + .. seealso:: + + :ref:`session_transaction_isolation` :param \**kw: Additional keyword arguments are sent to :meth:`get_bind()`, @@ -840,13 +889,18 @@ def connection(self, mapper=None, clause=None, bind = self.get_bind(mapper, clause=clause, **kw) return self._connection_for_bind(bind, - close_with_result=close_with_result) + close_with_result=close_with_result, + execution_options=execution_options) - def _connection_for_bind(self, engine, **kwargs): + def _connection_for_bind(self, engine, execution_options=None, **kw): if self.transaction is not None: - return self.transaction._connection_for_bind(engine) + return self.transaction._connection_for_bind( + engine, execution_options) else: - return engine.contextual_connect(**kwargs) + conn = engine.contextual_connect(**kw) + if execution_options: + conn = conn.execution_options(**execution_options) + return conn def execute(self, clause, params=None, mapper=None, bind=None, **kw): """Execute a SQL expression construct or string statement within @@ -863,8 +917,8 @@ def execute(self, clause, params=None, mapper=None, bind=None, **kw): user_table.select().where(user_table.c.id == 5) ) - :meth:`~.Session.execute` accepts any executable clause construct, such - as :func:`~.sql.expression.select`, + :meth:`~.Session.execute` accepts any executable clause construct, + such as :func:`~.sql.expression.select`, :func:`~.sql.expression.insert`, :func:`~.sql.expression.update`, :func:`~.sql.expression.delete`, and @@ -893,7 +947,8 @@ def execute(self, clause, params=None, mapper=None, bind=None, **kw): cursor's ``execute()`` or ``executemany()`` is used to execute the statement. An INSERT construct may be invoked for a single row:: - result = session.execute(users.insert(), {"id": 7, "name": "somename"}) + result = session.execute( + users.insert(), {"id": 7, "name": "somename"}) or for multiple rows:: @@ -911,8 +966,9 @@ def execute(self, clause, params=None, mapper=None, bind=None, **kw): :class:`.Connection`, which in the average case is derived directly from the "bind" of the :class:`.Session` itself, and in other cases can be based on the :func:`.mapper` - and :class:`.Table` objects passed to the method; see the documentation - for :meth:`.Session.get_bind` for a full description of this scheme. + and :class:`.Table` objects passed to the method; see the + documentation for :meth:`.Session.get_bind` for a full description of + this scheme. The :meth:`.Session.execute` method does *not* invoke autoflush. @@ -974,8 +1030,8 @@ def execute(self, clause, params=None, mapper=None, bind=None, **kw): if bind is None: bind = self.get_bind(mapper, clause=clause, **kw) - return self._connection_for_bind(bind, close_with_result=True).execute( - clause, params or {}) + return self._connection_for_bind( + bind, close_with_result=True).execute(clause, params or {}) def scalar(self, clause, params=None, mapper=None, bind=None, **kw): """Like :meth:`~.Session.execute` but return a scalar result.""" @@ -993,10 +1049,46 @@ def close(self): not use any connection resources until they are first needed. """ + self._close_impl(invalidate=False) + + def invalidate(self): + """Close this Session, using connection invalidation. 
+ + This is a variant of :meth:`.Session.close` that will additionally + ensure that the :meth:`.Connection.invalidate` method will be called + on all :class:`.Connection` objects. This can be called when + the database is known to be in a state where the connections are + no longer safe to be used. + + E.g.:: + + try: + sess = Session() + sess.add(User()) + sess.commit() + except gevent.Timeout: + sess.invalidate() + raise + except: + sess.rollback() + raise + + This clears all items and ends any transaction in progress. + + If this session were created with ``autocommit=False``, a new + transaction is immediately begun. Note that this new transaction does + not use any connection resources until they are first needed. + + .. versionadded:: 0.9.9 + + """ + self._close_impl(invalidate=True) + + def _close_impl(self, invalidate): self.expunge_all() if self.transaction is not None: for transaction in self.transaction._iterate_parents(): - transaction.close() + transaction.close(invalidate) def expunge_all(self): """Remove all object instances from this ``Session``. @@ -1013,43 +1105,50 @@ def expunge_all(self): self._deleted = {} # TODO: need much more test coverage for bind_mapper() and similar ! - # TODO: + crystalize + document resolution order + # TODO: + crystallize + document resolution order # vis. bind_mapper/bind_table - def bind_mapper(self, mapper, bind): - """Bind operations for a mapper to a Connectable. - - mapper - A mapper instance or mapped class + def _add_bind(self, key, bind): + try: + insp = inspect(key) + except sa_exc.NoInspectionAvailable: + if not isinstance(key, type): + raise exc.ArgumentError( + "Not acceptable bind target: %s" % + key) + else: + self.__binds[key] = bind + else: + if insp.is_selectable: + self.__binds[insp] = bind + elif insp.is_mapper: + self.__binds[insp.class_] = bind + for selectable in insp._all_tables: + self.__binds[selectable] = bind + else: + raise exc.ArgumentError( + "Not acceptable bind target: %s" % + key) - bind - Any Connectable: a ``Engine`` or ``Connection``. + def bind_mapper(self, mapper, bind): + """Associate a :class:`.Mapper` with a "bind", e.g. a :class:`.Engine` + or :class:`.Connection`. - All subsequent operations involving this mapper will use the given - `bind`. + The given mapper is added to a lookup used by the + :meth:`.Session.get_bind` method. """ - if isinstance(mapper, type): - mapper = class_mapper(mapper) - - self.__binds[mapper.base_mapper] = bind - for t in mapper._all_tables: - self.__binds[t] = bind + self._add_bind(mapper, bind) def bind_table(self, table, bind): - """Bind operations on a Table to a Connectable. + """Associate a :class:`.Table` with a "bind", e.g. a :class:`.Engine` + or :class:`.Connection`. - table - A ``Table`` instance - - bind - Any Connectable: a ``Engine`` or ``Connection``. - - All subsequent operations involving this ``Table`` will use the - given `bind`. + The given mapper is added to a lookup used by the + :meth:`.Session.get_bind` method. """ - self.__binds[table] = bind + self._add_bind(table, bind) def get_bind(self, mapper=None, clause=None): """Return a "bind" to which this :class:`.Session` is bound. @@ -1103,6 +1202,7 @@ def get_bind(self, mapper=None, clause=None): bound :class:`.MetaData`. 
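A minimal usage sketch of the resolution order described above, assuming hypothetical ``user_engine``/``misc_engine`` engines, a declaratively mapped ``User`` class, and an ``orders`` :class:`.Table` (all names illustrative only)::

    from sqlalchemy import create_engine

    user_engine = create_engine("postgresql://localhost/users")
    misc_engine = create_engine("sqlite:///misc.db")

    session = Session(binds={
        User: user_engine,      # looked up via the mapped class (and subclasses, by __mro__)
        orders: misc_engine,    # looked up when the Table is present in the statement
    })

    session.query(User).all()            # get_bind() resolves to user_engine
    session.execute(orders.select())     # get_bind() resolves to misc_engine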
""" + if mapper is clause is None: if self.bind: return self.bind @@ -1112,15 +1212,23 @@ def get_bind(self, mapper=None, clause=None): "Connection, and no context was provided to locate " "a binding.") - c_mapper = mapper is not None and _class_to_mapper(mapper) or None + if mapper is not None: + try: + mapper = inspect(mapper) + except sa_exc.NoInspectionAvailable: + if isinstance(mapper, type): + raise exc.UnmappedClassError(mapper) + else: + raise - # manually bound? if self.__binds: - if c_mapper: - if c_mapper.base_mapper in self.__binds: - return self.__binds[c_mapper.base_mapper] - elif c_mapper.mapped_table in self.__binds: - return self.__binds[c_mapper.mapped_table] + if mapper: + for cls in mapper.class_.__mro__: + if cls in self.__binds: + return self.__binds[cls] + if clause is None: + clause = mapper.mapped_table + if clause is not None: for t in sql_util.find_tables(clause, include_crud=True): if t in self.__binds: @@ -1132,21 +1240,22 @@ def get_bind(self, mapper=None, clause=None): if isinstance(clause, sql.expression.ClauseElement) and clause.bind: return clause.bind - if c_mapper and c_mapper.mapped_table.bind: - return c_mapper.mapped_table.bind + if mapper and mapper.mapped_table.bind: + return mapper.mapped_table.bind context = [] if mapper is not None: - context.append('mapper %s' % c_mapper) + context.append('mapper %s' % mapper) if clause is not None: context.append('SQL expression') raise sa_exc.UnboundExecutionError( "Could not locate a bind configured on %s or this Session" % ( - ', '.join(context))) + ', '.join(context))) def query(self, *entities, **kwargs): - """Return a new ``Query`` object corresponding to this ``Session``.""" + """Return a new :class:`.Query` object corresponding to this + :class:`.Session`.""" return self._query_cls(entities, self, **kwargs) @@ -1188,9 +1297,9 @@ def _autoflush(self): # with code that catches StatementError, IntegrityError, # etc. e.add_detail( - "raised as a result of Query-invoked autoflush; " - "consider using a session.no_autoflush block if this " - "flush is occuring prematurely") + "raised as a result of Query-invoked autoflush; " + "consider using a session.no_autoflush block if this " + "flush is occurring prematurely") util.raise_from_cause(e) def refresh(self, instance, attribute_names=None, lockmode=None): @@ -1327,7 +1436,7 @@ def _expire_state(self, state, attribute_names): # pre-fetch the full cascade since the expire is going to # remove associations cascaded = list(state.manager.mapper.cascade_iterator( - 'refresh-expire', state)) + 'refresh-expire', state)) self._conditional_expire(state) for o, m, st_, dct_ in cascaded: self._conditional_expire(st_) @@ -1342,7 +1451,7 @@ def _conditional_expire(self, state): state._detach() @util.deprecated("0.7", "The non-weak-referencing identity map " - "feature is no longer needed.") + "feature is no longer needed.") def prune(self): """Remove unreferenced instances cached in the identity map. 
@@ -1373,7 +1482,7 @@ def expunge(self, instance): state_str(state)) cascaded = list(state.manager.mapper.cascade_iterator( - 'expunge', state)) + 'expunge', state)) self._expunge_state(state) for o, m, st_, dct_ in cascaded: self._expunge_state(st_) @@ -1383,11 +1492,12 @@ def _expunge_state(self, state): self._new.pop(state) state._detach() elif self.identity_map.contains_state(state): - self.identity_map.discard(state) + self.identity_map.safe_discard(state) self._deleted.pop(state, None) state._detach() elif self.transaction: self.transaction._deleted.pop(state, None) + state._detach() def _register_newly_persistent(self, states): for state in states: @@ -1399,9 +1509,9 @@ def _register_newly_persistent(self, states): instance_key = mapper._identity_key_from_state(state) - if _none_set.issubset(instance_key[1]) and \ - not mapper.allow_partial_pks or \ - _none_set.issuperset(instance_key[1]): + if _none_set.intersection(instance_key[1]) and \ + not mapper.allow_partial_pks or \ + _none_set.issuperset(instance_key[1]): raise exc.FlushError( "Instance %s has a NULL identity key. If this is an " "auto-generated value, check that the database table " @@ -1416,10 +1526,10 @@ def _register_newly_persistent(self, states): if state.key is None: state.key = instance_key elif state.key != instance_key: - # primary key switch. use discard() in case another + # primary key switch. use safe_discard() in case another # state has already replaced this one in the identity # map (see test/orm/test_naturalpks.py ReversePKsTest) - self.identity_map.discard(state) + self.identity_map.safe_discard(state) if state in self.transaction._key_switches: orig_key = self.transaction._key_switches[state][0] else: @@ -1453,7 +1563,7 @@ def _remove_newly_deleted(self, states): if self._enable_transaction_accounting and self.transaction: self.transaction._deleted[state] = True - self.identity_map.discard(state) + self.identity_map.safe_discard(state) self._deleted.pop(state, None) state.deleted = True @@ -1491,9 +1601,9 @@ def _save_or_update_state(self, state): mapper = _state_mapper(state) for o, m, st_, dct_ in mapper.cascade_iterator( - 'save-update', - state, - halt_on=self._contains_state): + 'save-update', + state, + halt_on=self._contains_state): self._save_or_update_impl(st_) def delete(self, instance): @@ -1527,7 +1637,7 @@ def delete(self, instance): # so that autoflush does not delete the item # the strong reference to the instance itself is significant here cascade_states = list(state.manager.mapper.cascade_iterator( - 'delete', state)) + 'delete', state)) self._deleted[state] = state.obj() self.identity_map.add(state) @@ -1544,10 +1654,10 @@ def merge(self, instance, load=True): same primary key in the session. If not found locally, it attempts to load the object from the database based on primary key, and if none can be located, creates a new instance. The state of each - attribute on the source instance is then copied to the target instance. - The resulting target instance is then returned by the method; the - original source instance is left unmodified, and un-associated with the - :class:`.Session` if not already. + attribute on the source instance is then copied to the target + instance. The resulting target instance is then returned by the + method; the original source instance is left unmodified, and + un-associated with the :class:`.Session` if not already. This operation cascades to associated instances if the association is mapped with ``cascade="merge"``. 
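A minimal sketch of the merge behavior covered here, assuming a mapped ``User`` class with an integer primary key ``id`` and an already-configured ``session`` (both hypothetical)::

    # a detached object carrying a primary key; it is not attached to any Session
    detached = User(id=5, name="updated name")

    # merge() finds id=5 in the identity map or loads it from the database,
    # copies the attribute state onto that target, and returns the target;
    # ``detached`` itself is left unmodified and stays outside the Session
    merged = session.merge(detached)
    assert merged in session

    session.commit()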
@@ -1575,7 +1685,8 @@ def merge(self, instance, load=True): any existing related objects or collections that might not be loaded. The resulting objects from ``load=False`` are always produced as "clean", so it is only appropriate that the given objects - should be "clean" as well, else this suggests a mis-use of the method. + should be "clean" as well, else this suggests a mis-use of the + method. """ @@ -1593,9 +1704,9 @@ def merge(self, instance, load=True): try: self.autoflush = False return self._merge( - attributes.instance_state(instance), - attributes.instance_dict(instance), - load=load, _recursive=_recursive) + attributes.instance_state(instance), + attributes.instance_dict(instance), + load=load, _recursive=_recursive) finally: self.autoflush = autoflush @@ -1615,6 +1726,9 @@ def _merge(self, state, state_dict, load=True, _recursive=None): "all changes on mapped instances before merging with " "load=False.") key = mapper._identity_key_from_state(state) + key_is_persistent = attributes.NEVER_SET not in key[1] + else: + key_is_persistent = True if key in self.identity_map: merged = self.identity_map[key] @@ -1631,9 +1745,10 @@ def _merge(self, state, state_dict, load=True, _recursive=None): self._update_impl(merged_state) new_instance = True - elif not _none_set.issubset(key[1]) or \ - (mapper.allow_partial_pks and - not _none_set.issuperset(key[1])): + elif key_is_persistent and ( + not _none_set.intersection(key[1]) or + (mapper.allow_partial_pks and + not _none_set.issuperset(key[1]))): merged = self.query(mapper.class_).get(key[1]) else: merged = None @@ -1656,38 +1771,38 @@ def _merge(self, state, state_dict, load=True, _recursive=None): # version check if applicable if mapper.version_id_col is not None: existing_version = mapper._get_state_attr_by_column( - state, - state_dict, - mapper.version_id_col, - passive=attributes.PASSIVE_NO_INITIALIZE) + state, + state_dict, + mapper.version_id_col, + passive=attributes.PASSIVE_NO_INITIALIZE) merged_version = mapper._get_state_attr_by_column( - merged_state, - merged_dict, - mapper.version_id_col, - passive=attributes.PASSIVE_NO_INITIALIZE) + merged_state, + merged_dict, + mapper.version_id_col, + passive=attributes.PASSIVE_NO_INITIALIZE) if existing_version is not attributes.PASSIVE_NO_RESULT and \ - merged_version is not attributes.PASSIVE_NO_RESULT and \ - existing_version != merged_version: + merged_version is not attributes.PASSIVE_NO_RESULT and \ + existing_version != merged_version: raise exc.StaleDataError( - "Version id '%s' on merged state %s " - "does not match existing version '%s'. " - "Leave the version attribute unset when " - "merging to update the most recent version." - % ( - existing_version, - state_str(merged_state), - merged_version - )) + "Version id '%s' on merged state %s " + "does not match existing version '%s'. " + "Leave the version attribute unset when " + "merging to update the most recent version." 
+ % ( + existing_version, + state_str(merged_state), + merged_version + )) merged_state.load_path = state.load_path merged_state.load_options = state.load_options for prop in mapper.iterate_properties: prop.merge(self, state, state_dict, - merged_state, merged_dict, - load, _recursive) + merged_state, merged_dict, + load, _recursive) if not load: # remove any history @@ -1706,8 +1821,8 @@ def _validate_persistent(self, state): def _save_impl(self, state): if state.key is not None: raise sa_exc.InvalidRequestError( - "Object '%s' already has an identity - it can't be registered " - "as pending" % state_str(state)) + "Object '%s' already has an identity - " + "it can't be registered as pending" % state_str(state)) self._before_attach(state) if state not in self._new: @@ -1717,7 +1832,7 @@ def _save_impl(self, state): def _update_impl(self, state, discard_existing=False): if (self.identity_map.contains_state(state) and - state not in self._deleted): + state not in self._deleted): return if state.key is None: @@ -1731,7 +1846,7 @@ def _update_impl(self, state, discard_existing=False): "function to send this object back to the transient state." % state_str(state) ) - self._before_attach(state) + self._before_attach(state, check_identity_map=False) self._deleted.pop(state, None) if discard_existing: self.identity_map.replace(state) @@ -1783,8 +1898,8 @@ def enable_relationship_loading(self, obj): is what was already loaded from a foreign-key-holding value. The :meth:`.Session.enable_relationship_loading` method is - similar to the ``load_on_pending`` flag on :func:`.relationship`. Unlike - that flag, :meth:`.Session.enable_relationship_loading` allows + similar to the ``load_on_pending`` flag on :func:`.relationship`. + Unlike that flag, :meth:`.Session.enable_relationship_loading` allows an object to remain transient while still being able to load related items. @@ -1811,19 +1926,18 @@ def enable_relationship_loading(self, obj): self._attach(state, include_before=True) state._load_pending = True - def _before_attach(self, state): + def _before_attach(self, state, check_identity_map=True): if state.session_id != self.hash_key and \ self.dispatch.before_attach: self.dispatch.before_attach(self, state.obj()) - def _attach(self, state, include_before=False): - if state.key and \ + if check_identity_map and state.key and \ state.key in self.identity_map and \ not self.identity_map.contains_state(state): - raise sa_exc.InvalidRequestError("Can't attach instance " - "%s; another instance with key %s is already " - "present in this session." - % (state_str(state), state.key)) + raise sa_exc.InvalidRequestError( + "Can't attach instance " + "%s; another instance with key %s is already " + "present in this session." % (state_str(state), state.key)) if state.session_id and \ state.session_id is not self.hash_key and \ @@ -1833,10 +1947,11 @@ def _attach(self, state, include_before=False): "(this is '%s')" % (state_str(state), state.session_id, self.hash_key)) + def _attach(self, state, include_before=False): + if state.session_id != self.hash_key: - if include_before and \ - self.dispatch.before_attach: - self.dispatch.before_attach(self, state.obj()) + if include_before: + self._before_attach(state) state.session_id = self.hash_key if state.modified and state._strong_obj is None: state._strong_obj = state.obj() @@ -1861,7 +1976,8 @@ def __iter__(self): Session. 
""" - return iter(list(self._new.values()) + list(self.identity_map.values())) + return iter( + list(self._new.values()) + list(self.identity_map.values())) def _contains_state(self, state): return state in self._new or self.identity_map.contains_state(state) @@ -1882,7 +1998,7 @@ def flush(self, objects=None): For ``autocommit`` Sessions with no active manual transaction, flush() will create a transaction on the fly that surrounds the entire set of - operations int the flush. + operations into the flush. :param objects: Optional; restricts the flush operation to operate only on elements that are in the given collection. @@ -1914,8 +2030,8 @@ def _flush_warning(self, method): def _is_clean(self): return not self.identity_map.check_modified() and \ - not self._deleted and \ - not self._new + not self._deleted and \ + not self._new def _flush(self, objects=None): @@ -1994,21 +2110,21 @@ def _flush(self, objects=None): len_ = len(self.identity_map._modified) statelib.InstanceState._commit_all_states( - [(state, state.dict) for state in - self.identity_map._modified], - instance_dict=self.identity_map) + [(state, state.dict) for state in + self.identity_map._modified], + instance_dict=self.identity_map) util.warn("Attribute history events accumulated on %d " - "previously clean instances " - "within inner-flush event handlers have been reset, " - "and will not result in database updates. " - "Consider using set_committed_value() within " - "inner-flush event handlers to avoid this warning." - % len_) + "previously clean instances " + "within inner-flush event handlers have been " + "reset, and will not result in database updates. " + "Consider using set_committed_value() within " + "inner-flush event handlers to avoid this warning." + % len_) # useful assertions: - #if not objects: + # if not objects: # assert not self.identity_map._modified - #else: + # else: # assert self.identity_map._modified == \ # self.identity_map._modified.difference(objects) @@ -2020,8 +2136,228 @@ def _flush(self, objects=None): with util.safe_reraise(): transaction.rollback(_capture_exception=True) + def bulk_save_objects( + self, objects, return_defaults=False, update_changed_only=True): + """Perform a bulk save of the given list of objects. + + The bulk save feature allows mapped objects to be used as the + source of simple INSERT and UPDATE operations which can be more easily + grouped together into higher performing "executemany" + operations; the extraction of data from the objects is also performed + using a lower-latency process that ignores whether or not attributes + have actually been modified in the case of UPDATEs, and also ignores + SQL expressions. + + The objects as given are not added to the session and no additional + state is established on them, unless the ``return_defaults`` flag + is also set, in which case primary key attributes and server-side + default values will be populated. + + .. versionadded:: 1.0.0 + + .. warning:: + + The bulk save feature allows for a lower-latency INSERT/UPDATE + of rows at the expense of most other unit-of-work features. + Features such as object management, relationship handling, + and SQL clause support are **silently omitted** in favor of raw + INSERT/UPDATES of records. + + **Please read the list of caveats at** :ref:`bulk_operations` + **before using this method, and fully test and confirm the + functionality of all code developed using these systems.** + + :param objects: a list of mapped object instances. 
The mapped + objects are persisted as is, and are **not** associated with the + :class:`.Session` afterwards. + + For each object, whether the object is sent as an INSERT or an + UPDATE is dependent on the same rules used by the :class:`.Session` + in traditional operation; if the object has the + :attr:`.InstanceState.key` + attribute set, then the object is assumed to be "detached" and + will result in an UPDATE. Otherwise, an INSERT is used. + + In the case of an UPDATE, statements are grouped based on which + attributes have changed, and are thus to be the subject of each + SET clause. If ``update_changed_only`` is False, then all + attributes present within each object are applied to the UPDATE + statement, which may help in allowing the statements to be grouped + together into a larger executemany(), and will also reduce the + overhead of checking history on attributes. + + :param return_defaults: when True, rows that are missing values which + generate defaults, namely integer primary key defaults and sequences, + will be inserted **one at a time**, so that the primary key value + is available. In particular this will allow joined-inheritance + and other multi-table mappings to insert correctly without the need + to provide primary key values ahead of time; however, + :paramref:`.Session.bulk_save_objects.return_defaults` **greatly + reduces the performance gains** of the method overall. + + :param update_changed_only: when True, UPDATE statements are rendered + based on those attributes in each state that have logged changes. + When False, all attributes present are rendered into the SET clause + with the exception of primary key attributes. + + .. seealso:: + + :ref:`bulk_operations` + + :meth:`.Session.bulk_insert_mappings` + + :meth:`.Session.bulk_update_mappings` + + """ + for (mapper, isupdate), states in itertools.groupby( + (attributes.instance_state(obj) for obj in objects), + lambda state: (state.mapper, state.key is not None) + ): + self._bulk_save_mappings( + mapper, states, isupdate, True, + return_defaults, update_changed_only) + + def bulk_insert_mappings(self, mapper, mappings, return_defaults=False): + """Perform a bulk insert of the given list of mapping dictionaries. + + The bulk insert feature allows plain Python dictionaries to be used as + the source of simple INSERT operations which can be more easily + grouped together into higher performing "executemany" + operations. Using dictionaries, there is no "history" or session + state management features in use, reducing latency when inserting + large numbers of simple rows. + + The values within the dictionaries as given are typically passed + without modification into Core :meth:`.Insert` constructs, after + organizing the values within them across the tables to which + the given mapper is mapped. + + .. versionadded:: 1.0.0 + + .. warning:: + + The bulk insert feature allows for a lower-latency INSERT + of rows at the expense of most other unit-of-work features. + Features such as object management, relationship handling, + and SQL clause support are **silently omitted** in favor of raw + INSERT of records. + + **Please read the list of caveats at** :ref:`bulk_operations` + **before using this method, and fully test and confirm the + functionality of all code developed using these systems.** + + :param mapper: a mapped class, or the actual :class:`.Mapper` object, + representing the single kind of object represented within the mapping + list. 
+ + :param mappings: a list of dictionaries, each one containing the state + of the mapped row to be inserted, in terms of the attribute names + on the mapped class. If the mapping refers to multiple tables, + such as a joined-inheritance mapping, each dictionary must contain + all keys to be populated into all tables. + + :param return_defaults: when True, rows that are missing values which + generate defaults, namely integer primary key defaults and sequences, + will be inserted **one at a time**, so that the primary key value + is available. In particular this will allow joined-inheritance + and other multi-table mappings to insert correctly without the need + to provide primary + key values ahead of time; however, + :paramref:`.Session.bulk_insert_mappings.return_defaults` + **greatly reduces the performance gains** of the method overall. + If the rows + to be inserted only refer to a single table, then there is no + reason this flag should be set as the returned default information + is not used. + + + .. seealso:: + + :ref:`bulk_operations` + + :meth:`.Session.bulk_save_objects` + + :meth:`.Session.bulk_update_mappings` + + """ + self._bulk_save_mappings( + mapper, mappings, False, False, return_defaults, False) + + def bulk_update_mappings(self, mapper, mappings): + """Perform a bulk update of the given list of mapping dictionaries. + + The bulk update feature allows plain Python dictionaries to be used as + the source of simple UPDATE operations which can be more easily + grouped together into higher performing "executemany" + operations. Using dictionaries, there is no "history" or session + state management features in use, reducing latency when updating + large numbers of simple rows. + + .. versionadded:: 1.0.0 + + .. warning:: + + The bulk update feature allows for a lower-latency UPDATE + of rows at the expense of most other unit-of-work features. + Features such as object management, relationship handling, + and SQL clause support are **silently omitted** in favor of raw + UPDATES of records. + + **Please read the list of caveats at** :ref:`bulk_operations` + **before using this method, and fully test and confirm the + functionality of all code developed using these systems.** + + :param mapper: a mapped class, or the actual :class:`.Mapper` object, + representing the single kind of object represented within the mapping + list. + + :param mappings: a list of dictionaries, each one containing the state + of the mapped row to be updated, in terms of the attribute names + on the mapped class. If the mapping refers to multiple tables, + such as a joined-inheritance mapping, each dictionary may contain + keys corresponding to all tables. All those keys which are present + and are not part of the primary key are applied to the SET clause + of the UPDATE statement; the primary key values, which are required, + are applied to the WHERE clause. + + + .. 
seealso:: + + :ref:`bulk_operations` + + :meth:`.Session.bulk_insert_mappings` + + :meth:`.Session.bulk_save_objects` + + """ + self._bulk_save_mappings(mapper, mappings, True, False, False, False) + + def _bulk_save_mappings( + self, mapper, mappings, isupdate, isstates, + return_defaults, update_changed_only): + mapper = _class_to_mapper(mapper) + self._flushing = True + + transaction = self.begin( + subtransactions=True) + try: + if isupdate: + persistence._bulk_update( + mapper, mappings, transaction, + isstates, update_changed_only) + else: + persistence._bulk_insert( + mapper, mappings, transaction, isstates, return_defaults) + transaction.commit() + + except: + with util.safe_reraise(): + transaction.rollback(_capture_exception=True) + finally: + self._flushing = False + def is_modified(self, instance, include_collections=True, - passive=True): + passive=True): """Return ``True`` if the given instance has locally modified attributes. @@ -2049,12 +2385,12 @@ def is_modified(self, instance, include_collections=True, A few caveats to this method apply: - * Instances present in the :attr:`.Session.dirty` collection may report - ``False`` when tested with this method. This is because - the object may have received change events via attribute - mutation, thus placing it in :attr:`.Session.dirty`, - but ultimately the state is the same as that loaded from - the database, resulting in no net change here. + * Instances present in the :attr:`.Session.dirty` collection may + report ``False`` when tested with this method. This is because + the object may have received change events via attribute mutation, + thus placing it in :attr:`.Session.dirty`, but ultimately the state + is the same as that loaded from the database, resulting in no net + change here. * Scalar attributes may not have recorded the previously set value when a new value was applied, if the attribute was not loaded, or was expired, at the time the new value was received - in these @@ -2095,15 +2431,15 @@ def is_modified(self, instance, include_collections=True, for attr in state.manager.attributes: if \ - ( - not include_collections and - hasattr(attr.impl, 'get_collection') - ) or not hasattr(attr.impl, 'get_history'): + ( + not include_collections and + hasattr(attr.impl, 'get_collection') + ) or not hasattr(attr.impl, 'get_history'): continue (added, unchanged, deleted) = \ - attr.impl.get_history(state, dict_, - passive=attributes.NO_CHANGE) + attr.impl.get_history(state, dict_, + passive=attributes.NO_CHANGE) if added or deleted: return True @@ -2140,8 +2476,8 @@ def is_active(self): call :meth:`.Session.rollback`, in order to close out the transaction stack. It is in this "partial rollback" period that the :attr:`.is_active` flag returns False. After the call to - :meth:`.Session.rollback`, the :class:`.SessionTransaction` is replaced - with a new one and :attr:`.is_active` returns ``True`` again. + :meth:`.Session.rollback`, the :class:`.SessionTransaction` is + replaced with a new one and :attr:`.is_active` returns ``True`` again. When a :class:`.Session` is used in ``autocommit=True`` mode, the :class:`.SessionTransaction` is only instantiated within the scope @@ -2281,9 +2617,9 @@ class sessionmaker(_SessionClassMethods): """ def __init__(self, bind=None, class_=Session, autoflush=True, - autocommit=False, - expire_on_commit=True, - info=None, **kw): + autocommit=False, + expire_on_commit=True, + info=None, **kw): """Construct a new :class:`.sessionmaker`. 
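For orientation, a typical use of the factory looks roughly as follows (a sketch; the engine URL and ``other_engine`` are illustrative only)::

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    Session = sessionmaker(bind=engine, expire_on_commit=False)

    session = Session()                    # each call creates a new Session with these defaults
    Session.configure(bind=other_engine)   # defaults may be replaced later via configure()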
All arguments here except for ``class_`` correspond to arguments @@ -2307,8 +2643,8 @@ def __init__(self, bind=None, class_=Session, autoflush=True, .. versionadded:: 0.9.0 - :param \**kw: all other keyword arguments are passed to the constructor - of newly created :class:`.Session` objects. + :param \**kw: all other keyword arguments are passed to the + constructor of newly created :class:`.Session` objects. """ kw['bind'] = bind @@ -2355,26 +2691,56 @@ def configure(self, **new_kw): def __repr__(self): return "%s(class_=%r,%s)" % ( - self.__class__.__name__, - self.class_.__name__, - ", ".join("%s=%r" % (k, v) for k, v in self.kw.items()) - ) - + self.__class__.__name__, + self.class_.__name__, + ", ".join("%s=%r" % (k, v) for k, v in self.kw.items()) + ) def make_transient(instance): - """Make the given instance 'transient'. + """Alter the state of the given instance so that it is :term:`transient`. + + .. note:: + + :func:`.make_transient` is a special-case function for + advanced use cases only. + + The given mapped instance is assumed to be in the :term:`persistent` or + :term:`detached` state. The function will remove its association with any + :class:`.Session` as well as its :attr:`.InstanceState.identity`. The + effect is that the object will behave as though it were newly constructed, + except retaining any attribute / collection values that were loaded at the + time of the call. The :attr:`.InstanceState.deleted` flag is also reset + if this object had been deleted as a result of using + :meth:`.Session.delete`. + + .. warning:: + + :func:`.make_transient` does **not** "unexpire" or otherwise eagerly + load ORM-mapped attributes that are not currently loaded at the time + the function is called. This includes attributes which: + + * were expired via :meth:`.Session.expire` + + * were expired as the natural effect of committing a session + transaction, e.g. :meth:`.Session.commit` + + * are normally :term:`lazy loaded` but are not currently loaded + + * are "deferred" via :ref:`deferred` and are not yet loaded + + * were not present in the query which loaded this object, such as that + which is common in joined table inheritance and other scenarios. + + After :func:`.make_transient` is called, unloaded attributes such + as those above will normally resolve to the value ``None`` when + accessed, or an empty collection for a collection-oriented attribute. + As the object is transient and un-associated with any database + identity, it will no longer retrieve these values. - This will remove its association with any - session and additionally will remove its "identity key", - such that it's as though the object were newly constructed, - except retaining its values. It also resets the - "deleted" flag on the state if this object - had been explicitly deleted by its session. + .. seealso:: - Attributes which were "expired" or deferred at the - instance level are reverted to undefined, and - will not trigger any loads. + :func:`.make_transient_to_detached` """ state = attributes.instance_state(instance) @@ -2382,26 +2748,73 @@ def make_transient(instance): if s: s._expunge_state(state) - # remove expired state and - # deferred callables - state.callables.clear() + # remove expired state + state.expired_attributes.clear() + + # remove deferred callables + if state.callables: + del state.callables + if state.key: del state.key if state.deleted: del state.deleted +def make_transient_to_detached(instance): + """Make the given transient instance :term:`detached`. + + .. 
note:: + + :func:`.make_transient_to_detached` is a special-case function for + advanced use cases only. + + All attribute history on the given instance + will be reset as though the instance were freshly loaded + from a query. Missing attributes will be marked as expired. + The primary key attributes of the object, which are required, will be made + into the "key" of the instance. + + The object can then be added to a session, or merged + possibly with the load=False flag, at which point it will look + as if it were loaded that way, without emitting SQL. + + This is a special use case function that differs from a normal + call to :meth:`.Session.merge` in that a given persistent state + can be manufactured without any SQL calls. + + .. versionadded:: 0.9.5 + + .. seealso:: + + :func:`.make_transient` + + """ + state = attributes.instance_state(instance) + if state.session_id or state.key: + raise sa_exc.InvalidRequestError( + "Given object must be transient") + state.key = state.mapper._identity_key_from_state(state) + if state.deleted: + del state.deleted + state._commit_all(state.dict) + state._expire_attributes(state.dict, state.unloaded) + + def object_session(instance): - """Return the ``Session`` to which instance belongs. + """Return the :class:`.Session` to which the given instance belongs. - If the instance is not a mapped instance, an error is raised. + This is essentially the same as the :attr:`.InstanceState.session` + accessor. See that attribute for details. """ try: - return _state_session(attributes.instance_state(instance)) + state = attributes.instance_state(instance) except exc.NO_STATE: raise exc.UnmappedInstanceError(instance) + else: + return _state_session(state) _new_sessionid = util.counter() diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py index fb5db1fc99..c66507d5a4 100644 --- a/lib/sqlalchemy/orm/state.py +++ b/lib/sqlalchemy/orm/state.py @@ -1,5 +1,6 @@ # orm/state.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -16,11 +17,35 @@ from . import exc as orm_exc, interfaces from .path_registry import PathRegistry from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \ - NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF + NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF from . import base -class InstanceState(interfaces._InspectionAttr): - """tracks state information at the instance level.""" + +class InstanceState(interfaces.InspectionAttr): + """tracks state information at the instance level. + + The :class:`.InstanceState` is a key object used by the + SQLAlchemy ORM in order to track the state of an object; + it is created the moment an object is instantiated, typically + as a result of :term:`instrumentation` which SQLAlchemy applies + to the ``__init__()`` method of the class. + + :class:`.InstanceState` is also a semi-public object, + available for runtime inspection as to the state of a + mapped instance, including information such as its current + status within a particular :class:`.Session` and details + about data on individual attributes. The public API + in order to acquire a :class:`.InstanceState` object + is to use the :func:`.inspect` system:: + + >>> from sqlalchemy import inspect + >>> insp = inspect(some_mapped_object) + + .. 
seealso:: + + :ref:`core_inspection_toplevel` + + """ session_id = None key = None @@ -33,15 +58,35 @@ class InstanceState(interfaces._InspectionAttr): expired = False deleted = False _load_pending = False - is_instance = True + callables = () + """A namespace where a per-state loader callable can be associated. + + In SQLAlchemy 1.0, this is only used for lazy loaders / deferred + loaders that were set up via query option. + + Previously, callables was used also to indicate expired attributes + by storing a link to the InstanceState itself in this dictionary. + This role is now handled by the expired_attributes set. + + """ + def __init__(self, obj, manager): self.class_ = obj.__class__ self.manager = manager self.obj = weakref.ref(obj, self._cleanup) - self.callables = {} self.committed_state = {} + self.expired_attributes = set() + + expired_attributes = None + """The set of keys which are 'expired' to be loaded by + the manager's deferred scalar loader, assuming no pending + changes. + + see also the ``unmodified`` collection which is intersected + against this set when a refresh operation occurs.""" + @util.memoized_property def attrs(self): @@ -50,6 +95,9 @@ def attrs(self): and history. The returned object is an instance of :class:`.AttributeState`. + This object allows inspection of the current data + within an attribute as well as attribute history + since the last flush. """ return util.ImmutableProperties( @@ -61,25 +109,50 @@ def attrs(self): @property def transient(self): - """Return true if the object is transient.""" + """Return true if the object is :term:`transient`. + + .. seealso:: + + :ref:`session_object_states` + + """ return self.key is None and \ not self._attached @property def pending(self): - """Return true if the object is pending.""" + """Return true if the object is :term:`pending`. + + + .. seealso:: + + :ref:`session_object_states` + + """ return self.key is None and \ self._attached @property def persistent(self): - """Return true if the object is persistent.""" + """Return true if the object is :term:`persistent`. + + .. seealso:: + + :ref:`session_object_states` + + """ return self.key is not None and \ self._attached @property def detached(self): - """Return true if the object is detached.""" + """Return true if the object is :term:`detached`. + + .. seealso:: + + :ref:`session_object_states` + + """ return self.key is not None and \ not self._attached @@ -93,7 +166,16 @@ def _attached(self, sessionlib): @util.dependencies("sqlalchemy.orm.session") def session(self, sessionlib): """Return the owning :class:`.Session` for this instance, - or ``None`` if none available.""" + or ``None`` if none available. + + Note that the result here can in some cases be *different* + from that of ``obj in session``; an object that's been deleted + will report as not ``in session``, however if the transaction is + still in progress, this attribute will still refer to that session. + Only when the transaction is completed does the object become + fully detached under normal circumstances. + + """ return sessionlib._state_session(self) @property @@ -112,7 +194,7 @@ def identity(self): Returns ``None`` if the object has no primary key identity. .. note:: - An object which is transient or pending + An object which is :term:`transient` or :term:`pending` does **not** have a mapped identity until it is flushed, even if its attributes include primary key values. @@ -167,11 +249,25 @@ def _dispose(self): del self.obj def _cleanup(self, ref): + """Weakref callback cleanup. 
+ + This callable cleans out the state when it is being garbage + collected. + + this _cleanup **assumes** that there are no strong refs to us! + Will not work otherwise! + + """ instance_dict = self._instance_dict() - if instance_dict: - instance_dict.discard(self) + if instance_dict is not None: + instance_dict._fast_discard(self) + del self._instance_dict + + # we can't possibly be in instance_dict._modified + # b.c. this is weakref cleanup only, that set + # is strong referencing! + # assert self not in instance_dict._modified - self.callables = {} self.session_id = self._strong_obj = None del self.obj @@ -180,6 +276,17 @@ def obj(self): @property def dict(self): + """Return the instance dict used by the object. + + Under normal circumstances, this is always synonymous + with the ``__dict__`` attribute of the mapped object, + unless an alternative instrumentation system has been + configured. + + In the case that the actual object has been garbage + collected, this accessor returns a blank dictionary. + + """ o = self.obj() if o is not None: return base.instance_dict(o) @@ -187,7 +294,7 @@ def dict(self): return {} def _initialize_instance(*mixed, **kwargs): - self, instance, args = mixed[0], mixed[1], mixed[2:] + self, instance, args = mixed[0], mixed[1], mixed[2:] # noqa manager = self.manager manager.dispatch.init(self, args, kwargs) @@ -195,8 +302,8 @@ def _initialize_instance(*mixed, **kwargs): try: return manager.original_init(*mixed[1:], **kwargs) except: - manager.dispatch.init_failure(self, args, kwargs) - raise + with util.safe_reraise(): + manager.dispatch.init_failure(self, args, kwargs) def get_history(self, key, passive): return self.manager[key].impl.get_history(self, self.dict, passive) @@ -213,9 +320,9 @@ def __getstate__(self): state_dict = {'instance': self.obj()} state_dict.update( (k, self.__dict__[k]) for k in ( - 'committed_state', '_pending_mutations', 'modified', 'expired', - 'callables', 'key', 'parents', 'load_options', - 'class_', + 'committed_state', '_pending_mutations', 'modified', + 'expired', 'callables', 'key', 'parents', 'load_options', + 'class_', 'expired_attributes' ) if k in self.__dict__ ) if self.load_path: @@ -242,7 +349,18 @@ def __setstate__(self, state_dict): self.parents = state_dict.get('parents', {}) self.modified = state_dict.get('modified', False) self.expired = state_dict.get('expired', False) - self.callables = state_dict.get('callables', {}) + if 'callables' in state_dict: + self.callables = state_dict['callables'] + + try: + self.expired_attributes = state_dict['expired_attributes'] + except KeyError: + self.expired_attributes = set() + # 0.9 and earlier compat + for k in list(self.callables): + if self.callables[k] is self: + self.expired_attributes.add(k) + del self.callables[k] self.__dict__.update([ (k, state_dict[k]) for k in ( @@ -252,16 +370,10 @@ def __setstate__(self, state_dict): if 'load_path' in state_dict: self.load_path = PathRegistry.\ - deserialize(state_dict['load_path']) + deserialize(state_dict['load_path']) state_dict['manager'](self, inst, state_dict) - def _initialize(self, key): - """Set this attribute to an empty value or collection, - based on the AttributeImpl in use.""" - - self.manager.get_impl(key).initialize(self, self.dict) - def _reset(self, dict_, key): """Remove the given attribute and any callables associated with it.""" @@ -269,71 +381,73 @@ def _reset(self, dict_, key): old = dict_.pop(key, None) if old is not None and self.manager[key].impl.collection: 
self.manager[key].impl._invalidate_collection(old) - self.callables.pop(key, None) - - def _expire_attribute_pre_commit(self, dict_, key): - """a fast expire that can be called by column loaders during a load. - - The additional bookkeeping is finished up in commit_all(). - - Should only be called for scalar attributes. - - This method is actually called a lot with joined-table - loading, when the second table isn't present in the result. - - """ - dict_.pop(key, None) - self.callables[key] = self + self.expired_attributes.discard(key) + if self.callables: + self.callables.pop(key, None) @classmethod - def _row_processor(cls, manager, fn, key): + def _instance_level_callable_processor(cls, manager, fn, key): impl = manager[key].impl if impl.collection: def _set_callable(state, dict_, row): + if 'callables' not in state.__dict__: + state.callables = {} old = dict_.pop(key, None) if old is not None: impl._invalidate_collection(old) state.callables[key] = fn else: def _set_callable(state, dict_, row): + if 'callables' not in state.__dict__: + state.callables = {} state.callables[key] = fn return _set_callable def _expire(self, dict_, modified_set): self.expired = True + if self.modified: modified_set.discard(self) + self.committed_state.clear() + self.modified = False - self.modified = False self._strong_obj = None - self.committed_state.clear() + if '_pending_mutations' in self.__dict__: + del self.__dict__['_pending_mutations'] - InstanceState._pending_mutations._reset(self) + if 'parents' in self.__dict__: + del self.__dict__['parents'] - # clear out 'parents' collection. not - # entirely clear how we can best determine - # which to remove, or not. - InstanceState.parents._reset(self) + self.expired_attributes.update( + [impl.key for impl in self.manager._scalar_loader_impls + if impl.expire_missing or impl.key in dict_] + ) - for key in self.manager: - impl = self.manager[key].impl - if impl.accepts_scalar_loader and \ - (impl.expire_missing or key in dict_): - self.callables[key] = self - old = dict_.pop(key, None) - if impl.collection and old is not None: - impl._invalidate_collection(old) + if self.callables: + for k in self.expired_attributes.intersection(self.callables): + del self.callables[k] + + for k in self.manager._collection_impl_keys.intersection(dict_): + collection = dict_.pop(k) + collection._sa_adapter.invalidated = True + + for key in self.manager._all_key_set.intersection(dict_): + del dict_[key] self.manager.dispatch.expire(self, None) def _expire_attributes(self, dict_, attribute_names): pending = self.__dict__.get('_pending_mutations', None) + callables = self.callables + for key in attribute_names: impl = self.manager[key].impl if impl.accepts_scalar_loader: - self.callables[key] = self + self.expired_attributes.add(key) + if callables and key in callables: + del callables[key] old = dict_.pop(key, None) if impl.collection and old is not None: impl._invalidate_collection(old) @@ -344,7 +458,7 @@ def _expire_attributes(self, dict_, attribute_names): self.manager.dispatch.expire(self, attribute_names) - def __call__(self, state, passive): + def _load_expired(self, state, passive): """__call__ allows the InstanceState to act as a deferred callable for loading expired attributes, which is also serializable (picklable). 
@@ -355,7 +469,7 @@ def __call__(self, state, passive): return PASSIVE_NO_RESULT toload = self.expired_attributes.\ - intersection(self.unmodified) + intersection(self.unmodified) self.manager.deferred_scalar_loader(self, toload) @@ -363,8 +477,7 @@ def __call__(self, state, passive): # instance state didn't have an identity, # the attributes still might be in the callables # dict. ensure they are removed. - for k in toload.intersection(self.callables): - del self.callables[k] + self.expired_attributes.clear() return ATTR_WAS_SET @@ -378,7 +491,7 @@ def unmodified_intersection(self, keys): """Return self.unmodified.intersection(keys).""" return set(keys).intersection(self.manager).\ - difference(self.committed_state) + difference(self.committed_state) @property def unloaded(self): @@ -389,32 +502,21 @@ def unloaded(self): """ return set(self.manager).\ - difference(self.committed_state).\ - difference(self.dict) + difference(self.committed_state).\ + difference(self.dict) @property def _unloaded_non_object(self): return self.unloaded.intersection( - attr for attr in self.manager - if self.manager[attr].impl.accepts_scalar_loader - ) - - @property - def expired_attributes(self): - """Return the set of keys which are 'expired' to be loaded by - the manager's deferred scalar loader, assuming no pending - changes. - - see also the ``unmodified`` collection which is intersected - against this set when a refresh operation occurs. - - """ - return set([k for k, v in self.callables.items() if v is self]) + attr for attr in self.manager + if self.manager[attr].impl.accepts_scalar_loader + ) def _instance_dict(self): return None - def _modified_event(self, dict_, attr, previous, collection=False, force=False): + def _modified_event( + self, dict_, attr, previous, collection=False, force=False): if not attr.send_modified_events: return if attr.key not in self.committed_state or force: @@ -432,6 +534,7 @@ def _modified_event(self, dict_, attr, previous, collection=False, force=False): if (self.session_id and self._strong_obj is None) \ or not self.modified: + self.modified = True instance_dict = self._instance_dict() if instance_dict: instance_dict._modified.add(self) @@ -445,14 +548,13 @@ def _modified_event(self, dict_, attr, previous, collection=False, force=False): if inst is None: raise orm_exc.ObjectDereferencedError( - "Can't emit change event for attribute '%s' - " - "parent object of type %s has been garbage " - "collected." - % ( - self.manager[attr.key], - base.state_class_str(self) - )) - self.modified = True + "Can't emit change event for attribute '%s' - " + "parent object of type %s has been garbage " + "collected." + % ( + self.manager[attr.key], + base.state_class_str(self) + )) def _commit(self, dict_, keys): """Commit attributes. @@ -469,10 +571,18 @@ def _commit(self, dict_, keys): self.expired = False - for key in set(self.callables).\ - intersection(keys).\ - intersection(dict_): - del self.callables[key] + self.expired_attributes.difference_update( + set(keys).intersection(dict_)) + + # the per-keys commit removes object-level callables, + # while that of commit_all does not. it's not clear + # if this behavior has a clear rationale, however tests do + # ensure this is what it does. + if self.callables: + for key in set(self.callables).\ + intersection(keys).\ + intersection(dict_): + del self.callables[key] def _commit_all(self, dict_, instance_dict=None): """commit all attributes unconditionally. 
@@ -483,7 +593,8 @@ def _commit_all(self, dict_, instance_dict=None): - all attributes are marked as "committed" - the "strong dirty reference" is removed - the "modified" flag is set to False - - any "expired" markers/callables for attributes loaded are removed. + - any "expired" markers for scalar attributes loaded are removed. + - lazy load callables for objects / collections *stay* Attributes marked as "expired" can potentially remain "expired" after this step if a value was not populated in state.dict. @@ -493,16 +604,17 @@ def _commit_all(self, dict_, instance_dict=None): @classmethod def _commit_all_states(self, iter, instance_dict=None): - """Mass version of commit_all().""" + """Mass / highly inlined version of commit_all().""" for state, dict_ in iter: + state_dict = state.__dict__ + state.committed_state.clear() - InstanceState._pending_mutations._reset(state) - callables = state.callables - for key in list(callables): - if key in dict_ and callables[key] is state: - del callables[key] + if '_pending_mutations' in state_dict: + del state_dict['_pending_mutations'] + + state.expired_attributes.difference_update(dict_) if instance_dict and state.modified: instance_dict._modified.discard(state) @@ -550,7 +662,7 @@ def value(self): """ return self.state.manager[self.key].__get__( - self.state.obj(), self.state.class_) + self.state.obj(), self.state.class_) @property def history(self): @@ -569,7 +681,7 @@ def history(self): """ return self.state.get_history(self.key, - PASSIVE_NO_INITIALIZE) + PASSIVE_NO_INITIALIZE) def load_history(self): """Return the current pre-flush change history for @@ -588,8 +700,7 @@ def load_history(self): """ return self.state.get_history(self.key, - PASSIVE_OFF ^ INIT_OK) - + PASSIVE_OFF ^ INIT_OK) class PendingCollection(object): @@ -600,6 +711,7 @@ class PendingCollection(object): PendingCollection are applied to it to produce the final result. """ + def __init__(self): self.deleted_items = util.IdentitySet() self.added_items = util.OrderedIdentitySet() diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index 4a07e78569..37f60e1a2b 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -1,5 +1,6 @@ # orm/strategies.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,27 +13,30 @@ from ..sql import util as sql_util, visitors from .. import sql from . import ( - attributes, interfaces, exc as orm_exc, loading, - unitofwork, util as orm_util - ) + attributes, interfaces, exc as orm_exc, loading, + unitofwork, util as orm_util +) from .state import InstanceState from .util import _none_set from . 
import properties from .interfaces import ( LoaderStrategy, StrategizedProperty - ) +) +from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE from .session import _state_session import itertools -def _register_attribute(strategy, mapper, useobject, - compare_function=None, - typecallable=None, - uselist=False, - callable_=None, - proxy_property=None, - active_history=False, - impl_class=None, - **kw + +def _register_attribute( + strategy, mapper, useobject, + compare_function=None, + typecallable=None, + uselist=False, + callable_=None, + proxy_property=None, + active_history=False, + impl_class=None, + **kw ): prop = strategy.parent_property @@ -47,9 +51,10 @@ def _register_attribute(strategy, mapper, useobject, if prop.key in prop.parent.validators: fn, opts = prop.parent.validators[prop.key] listen_hooks.append( - lambda desc, prop: orm_util._validator_events(desc, - prop.key, fn, **opts) - ) + lambda desc, prop: orm_util._validator_events( + desc, + prop.key, fn, **opts) + ) if useobject: listen_hooks.append(unitofwork.track_cascade_events) @@ -59,13 +64,27 @@ def _register_attribute(strategy, mapper, useobject, backref = kw.pop('backref', None) if backref: listen_hooks.append( - lambda desc, prop: attributes.backref_listeners(desc, - backref, - uselist) + lambda desc, prop: attributes.backref_listeners( + desc, + backref, + uselist + ) ) + # a single MapperProperty is shared down a class inheritance + # hierarchy, so we set up attribute instrumentation and backref event + # for each mapper down the hierarchy. + + # typically, "mapper" is the same as prop.parent, due to the way + # the configure_mappers() process runs, however this is not strongly + # enforced, and in the case of a second configure_mappers() run the + # mapper here might not be prop.parent; also, a subclass mapper may + # be called here before a superclass mapper. That is, can't depend + # on mappers not already being set up so we have to check each one. + for m in mapper.self_and_descendants: - if prop is m._props.get(prop.key): + if prop is m._props.get(prop.key) and \ + not m.class_manager._attr_has_impl(prop.key): desc = attributes.register_attribute_impl( m.class_, @@ -75,8 +94,9 @@ def _register_attribute(strategy, mapper, useobject, compare_function=compare_function, useobject=useobject, extension=attribute_ext, - trackparent=useobject and (prop.single_parent - or prop.direction is interfaces.ONETOMANY), + trackparent=useobject and ( + prop.single_parent or + prop.direction is interfaces.ONETOMANY), typecallable=typecallable, callable_=callable_, active_history=active_history, @@ -84,11 +104,12 @@ def _register_attribute(strategy, mapper, useobject, send_modified_events=not useobject or not prop.viewonly, doc=prop.doc, **kw - ) + ) for hook in listen_hooks: hook(desc, prop) + @properties.ColumnProperty.strategy_for(instrument=False, deferred=False) class UninstrumentedColumnLoader(LoaderStrategy): """Represent the a non-instrumented MapperProperty. @@ -97,19 +118,24 @@ class UninstrumentedColumnLoader(LoaderStrategy): if the argument is against the with_polymorphic selectable. 
""" + __slots__ = 'columns', + def __init__(self, parent): super(UninstrumentedColumnLoader, self).__init__(parent) self.columns = self.parent_property.columns - def setup_query(self, context, entity, path, loadopt, adapter, - column_collection=None, **kwargs): + def setup_query( + self, context, entity, path, loadopt, adapter, + column_collection=None, **kwargs): for c in self.columns: if adapter: c = adapter.columns[c] column_collection.append(c) - def create_row_processor(self, context, path, loadopt, mapper, row, adapter): - return None, None, None + def create_row_processor( + self, context, path, loadopt, + mapper, result, adapter, populators): + pass @log.class_logger @@ -117,48 +143,55 @@ def create_row_processor(self, context, path, loadopt, mapper, row, adapter): class ColumnLoader(LoaderStrategy): """Provide loading behavior for a :class:`.ColumnProperty`.""" + __slots__ = 'columns', 'is_composite' + def __init__(self, parent): super(ColumnLoader, self).__init__(parent) self.columns = self.parent_property.columns self.is_composite = hasattr(self.parent_property, 'composite_class') - def setup_query(self, context, entity, path, loadopt, - adapter, column_collection, **kwargs): + def setup_query( + self, context, entity, path, loadopt, + adapter, column_collection, memoized_populators, **kwargs): + for c in self.columns: if adapter: c = adapter.columns[c] column_collection.append(c) + fetch = self.columns[0] + if adapter: + fetch = adapter.columns[fetch] + memoized_populators[self.parent_property] = fetch + def init_class_attribute(self, mapper): self.is_class_level = True coltype = self.columns[0].type # TODO: check all columns ? check for foreign key as well? active_history = self.parent_property.active_history or \ - self.columns[0].primary_key or \ - mapper.version_id_col in set(self.columns) + self.columns[0].primary_key or \ + mapper.version_id_col in set(self.columns) - _register_attribute(self, mapper, useobject=False, + _register_attribute( + self, mapper, useobject=False, compare_function=coltype.compare_values, active_history=active_history - ) + ) - def create_row_processor(self, context, path, - loadopt, mapper, row, adapter): - key = self.key + def create_row_processor( + self, context, path, + loadopt, mapper, result, adapter, populators): # look through list of columns represented here # to see which, if any, is present in the row. 
for col in self.columns: if adapter: col = adapter.columns[col] - if col is not None and col in row: - def fetch_col(state, dict_, row): - dict_[key] = row[col] - return fetch_col, None, None + getter = result._getter(col, False) + if getter: + populators["quick"].append((self.key, getter)) + break else: - def expire_for_non_present_col(state, dict_, row): - state._expire_attribute_pre_commit(dict_, key) - return expire_for_non_present_col, None, None - + populators["expire"].append((self.key, True)) @log.class_logger @@ -166,55 +199,72 @@ def expire_for_non_present_col(state, dict_, row): class DeferredColumnLoader(LoaderStrategy): """Provide loading behavior for a deferred :class:`.ColumnProperty`.""" + __slots__ = 'columns', 'group' + def __init__(self, parent): super(DeferredColumnLoader, self).__init__(parent) if hasattr(self.parent_property, 'composite_class'): raise NotImplementedError("Deferred loading for composite " - "types not implemented yet") + "types not implemented yet") self.columns = self.parent_property.columns self.group = self.parent_property.group - def create_row_processor(self, context, path, loadopt, mapper, row, adapter): - col = self.columns[0] - if adapter: - col = adapter.columns[col] + def create_row_processor( + self, context, path, loadopt, + mapper, result, adapter, populators): - key = self.key - if col in row: - return self.parent_property._get_strategy_by_cls(ColumnLoader).\ - create_row_processor( - context, path, loadopt, mapper, row, adapter) - - elif not self.is_class_level: - set_deferred_for_local_state = InstanceState._row_processor( - mapper.class_manager, - LoadDeferredColumns(key), key) - return set_deferred_for_local_state, None, None + # this path currently does not check the result + # for the column; this is because in most cases we are + # working just with the setup_query() directive which does + # not support this, and the behavior here should be consistent. + if not self.is_class_level: + set_deferred_for_local_state = \ + self.parent_property._deferred_column_loader + populators["new"].append((self.key, set_deferred_for_local_state)) else: - def reset_col_for_deferred(state, dict_, row): - # reset state on the key so that deferred callables - # fire off on next access. 
- state._reset(dict_, key) - return reset_col_for_deferred, None, None + populators["expire"].append((self.key, False)) def init_class_attribute(self, mapper): self.is_class_level = True - _register_attribute(self, mapper, useobject=False, - compare_function=self.columns[0].type.compare_values, - callable_=self._load_for_state, - expire_missing=False + _register_attribute( + self, mapper, useobject=False, + compare_function=self.columns[0].type.compare_values, + callable_=self._load_for_state, + expire_missing=False ) - def setup_query(self, context, entity, path, loadopt, adapter, - only_load_props=None, **kwargs): + def setup_query( + self, context, entity, path, loadopt, + adapter, column_collection, memoized_populators, + only_load_props=None, **kw): + if ( - loadopt and self.group and - loadopt.local_opts.get('undefer_group', False) == self.group - ) or (only_load_props and self.key in only_load_props): + ( + loadopt and + 'undefer_pks' in loadopt.local_opts and + set(self.columns).intersection( + self.parent._should_undefer_in_wildcard) + ) + or + ( + loadopt and + self.group and + loadopt.local_opts.get('undefer_group_%s' % self.group, False) + ) + or + ( + only_load_props and self.key in only_load_props + ) + ): self.parent_property._get_strategy_by_cls(ColumnLoader).\ - setup_query(context, entity, - path, loadopt, adapter, **kwargs) + setup_query(context, entity, + path, loadopt, adapter, + column_collection, memoized_populators, **kw) + elif self.is_class_level: + memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED + else: + memoized_populators[self.parent_property] = _DEFER_FOR_STATE def _load_for_state(self, state, passive): if not state.key: @@ -227,12 +277,12 @@ def _load_for_state(self, state, passive): if self.group: toload = [ - p.key for p in - localparent.iterate_properties - if isinstance(p, StrategizedProperty) and - isinstance(p.strategy, DeferredColumnLoader) and - p.group == self.group - ] + p.key for p in + localparent.iterate_properties + if isinstance(p, StrategizedProperty) and + isinstance(p.strategy, DeferredColumnLoader) and + p.group == self.group + ] else: toload = [self.key] @@ -245,17 +295,17 @@ def _load_for_state(self, state, passive): "Parent instance %s is not bound to a Session; " "deferred load operation of attribute '%s' cannot proceed" % (orm_util.state_str(state), self.key) - ) + ) query = session.query(localparent) - if loading.load_on_ident(query, state.key, - only_load_props=group, refresh_state=state) is None: + if loading.load_on_ident( + query, state.key, + only_load_props=group, refresh_state=state) is None: raise orm_exc.ObjectDeletedError(state) return attributes.ATTR_WAS_SET - class LoadDeferredColumns(object): """serializable loader object used by DeferredColumnLoader""" @@ -271,10 +321,11 @@ def __call__(self, state, passive=attributes.PASSIVE_OFF): return strategy._load_for_state(state, passive) - class AbstractRelationshipLoader(LoaderStrategy): """LoaderStratgies which deal with related objects.""" + __slots__ = 'mapper', 'target', 'uselist' + def __init__(self, parent): super(AbstractRelationshipLoader, self).__init__(parent) self.mapper = self.parent_property.mapper @@ -282,7 +333,6 @@ def __init__(self, parent): self.uselist = self.parent_property.uselist - @log.class_logger @properties.RelationshipProperty.strategy_for(lazy="noload") @properties.RelationshipProperty.strategy_for(lazy=None) @@ -292,53 +342,65 @@ class NoLoader(AbstractRelationshipLoader): """ + __slots__ = () + def init_class_attribute(self, 
mapper): self.is_class_level = True - _register_attribute(self, mapper, + _register_attribute( + self, mapper, useobject=True, uselist=self.parent_property.uselist, typecallable=self.parent_property.collection_class, ) - def create_row_processor(self, context, path, loadopt, mapper, row, adapter): + def create_row_processor( + self, context, path, loadopt, mapper, + result, adapter, populators): def invoke_no_load(state, dict_, row): - state._initialize(self.key) - return invoke_no_load, None, None - + if self.uselist: + state.manager.get_impl(self.key).initialize(state, dict_) + else: + dict_[self.key] = None + populators["new"].append((self.key, invoke_no_load)) @log.class_logger @properties.RelationshipProperty.strategy_for(lazy=True) @properties.RelationshipProperty.strategy_for(lazy="select") -class LazyLoader(AbstractRelationshipLoader): +class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots): """Provide loading behavior for a :class:`.RelationshipProperty` with "lazy=True", that is loads when first accessed. """ + __slots__ = ( + '_lazywhere', '_rev_lazywhere', 'use_get', '_bind_to_col', + '_equated_columns', '_rev_bind_to_col', '_rev_equated_columns', + '_simple_lazy_clause') + def __init__(self, parent): super(LazyLoader, self).__init__(parent) join_condition = self.parent_property._join_condition self._lazywhere, \ - self._bind_to_col, \ - self._equated_columns = join_condition.create_lazy_clause() + self._bind_to_col, \ + self._equated_columns = join_condition.create_lazy_clause() self._rev_lazywhere, \ - self._rev_bind_to_col, \ - self._rev_equated_columns = join_condition.create_lazy_clause( - reverse_direction=True) + self._rev_bind_to_col, \ + self._rev_equated_columns = join_condition.create_lazy_clause( + reverse_direction=True) self.logger.info("%s lazy loading clause %s", self, self._lazywhere) # determine if our "lazywhere" clause is the same as the mapper's # get() clause. then we can just use mapper.get() self.use_get = not self.uselist and \ - self.mapper._get_clause[0].compare( - self._lazywhere, - use_proxies=True, - equivalents=self.mapper._equivalent_columns - ) + self.mapper._get_clause[0].compare( + self._lazywhere, + use_proxies=True, + equivalents=self.mapper._equivalent_columns + ) if self.use_get: for col in list(self._equated_columns): @@ -347,7 +409,7 @@ def __init__(self, parent): self._equated_columns[c] = self._equated_columns[col] self.logger.info("%s will use query.get() to " - "optimize instance loads" % self) + "optimize instance loads", self) def init_class_attribute(self, mapper): self.is_class_level = True @@ -364,7 +426,8 @@ def init_class_attribute(self, mapper): # will enable active_history # in that case. otherwise we don't need the # "old" value during backref operations. 
- _register_attribute(self, + _register_attribute( + self, mapper, useobject=True, callable_=self._load_for_state, @@ -374,91 +437,66 @@ def init_class_attribute(self, mapper): active_history=active_history ) - def lazy_clause(self, state, reverse_direction=False, - alias_secondary=False, - adapt_source=None, - passive=None): + def _memoized_attr__simple_lazy_clause(self): + criterion, bind_to_col = ( + self._lazywhere, + self._bind_to_col + ) + + params = [] + + def visit_bindparam(bindparam): + bindparam.unique = False + if bindparam._identifying_key in bind_to_col: + params.append(( + bindparam.key, bind_to_col[bindparam._identifying_key], + None)) + else: + params.append((bindparam.key, None, bindparam.value)) + + criterion = visitors.cloned_traverse( + criterion, {}, {'bindparam': visit_bindparam} + ) + + return criterion, params + + def _generate_lazy_clause(self, state, passive): + criterion, param_keys = self._simple_lazy_clause + if state is None: - return self._lazy_none_clause( - reverse_direction, - adapt_source=adapt_source) - - if not reverse_direction: - criterion, bind_to_col, rev = \ - self._lazywhere, \ - self._bind_to_col, \ - self._equated_columns - else: - criterion, bind_to_col, rev = \ - self._rev_lazywhere, \ - self._rev_bind_to_col, \ - self._rev_equated_columns + return sql_util.adapt_criterion_to_null( + criterion, [key for key, ident, value in param_keys]) - if reverse_direction: - mapper = self.parent_property.mapper - else: - mapper = self.parent_property.parent + mapper = self.parent_property.parent o = state.obj() # strong ref dict_ = attributes.instance_dict(o) - # use the "committed state" only if we're in a flush - # for this state. + if passive & attributes.INIT_OK: + passive ^= attributes.INIT_OK - if passive and passive & attributes.LOAD_AGAINST_COMMITTED: - def visit_bindparam(bindparam): - if bindparam._identifying_key in bind_to_col: - bindparam.callable = \ - lambda: mapper._get_committed_state_attr_by_column( - state, dict_, - bind_to_col[bindparam._identifying_key]) - else: - def visit_bindparam(bindparam): - if bindparam._identifying_key in bind_to_col: - bindparam.callable = \ - lambda: mapper._get_state_attr_by_column( - state, dict_, - bind_to_col[bindparam._identifying_key]) - - if self.parent_property.secondary is not None and alias_secondary: - criterion = sql_util.ClauseAdapter( - self.parent_property.secondary.alias()).\ - traverse(criterion) - - criterion = visitors.cloned_traverse( - criterion, {}, {'bindparam': visit_bindparam}) - - if adapt_source: - criterion = adapt_source(criterion) - return criterion - - def _lazy_none_clause(self, reverse_direction=False, adapt_source=None): - if not reverse_direction: - criterion, bind_to_col, rev = \ - self._lazywhere, \ - self._bind_to_col,\ - self._equated_columns - else: - criterion, bind_to_col, rev = \ - self._rev_lazywhere, \ - self._rev_bind_to_col, \ - self._rev_equated_columns + params = {} + for key, ident, value in param_keys: + if ident is not None: + if passive and passive & attributes.LOAD_AGAINST_COMMITTED: + value = mapper._get_committed_state_attr_by_column( + state, dict_, ident, passive) + else: + value = mapper._get_state_attr_by_column( + state, dict_, ident, passive) - criterion = sql_util.adapt_criterion_to_null(criterion, bind_to_col) + params[key] = value - if adapt_source: - criterion = adapt_source(criterion) - return criterion + return criterion, params def _load_for_state(self, state, passive): - if not state.key and \ - ( + if not state.key and ( ( not 
self.parent_property.load_on_pending and not state._load_pending ) or not state.session_id - ): + ): return attributes.ATTR_EMPTY pending = not state.key @@ -500,7 +538,7 @@ def _load_for_state(self, state, passive): if instance is not None: return instance elif not passive & attributes.SQL_OK or \ - not passive & attributes.RELATED_OBJECT_OK: + not passive & attributes.RELATED_OBJECT_OK: return attributes.PASSIVE_NO_RESULT return self._emit_lazyload(session, state, ident_key, passive) @@ -517,17 +555,18 @@ def _get_ident_for_use_get(self, session, state, passive): return [ get_attr( - state, - dict_, - self._equated_columns[pk], - passive=passive) + state, + dict_, + self._equated_columns[pk], + passive=passive) for pk in self.mapper.primary_key ] @util.dependencies("sqlalchemy.orm.strategy_options") - def _emit_lazyload(self, strategy_options, session, state, ident_key, passive): - q = session.query(self.mapper)._adapt_all_clauses() + def _emit_lazyload( + self, strategy_options, session, state, ident_key, passive): + q = session.query(self.mapper)._adapt_all_clauses() if self.parent_property.secondary is not None: q = q.select_from(self.mapper, self.parent_property.secondary) @@ -539,7 +578,6 @@ def _emit_lazyload(self, strategy_options, session, state, ident_key, passive): if pending or passive & attributes.NO_AUTOFLUSH: q = q.autoflush(False) - if state.load_path: q = q._with_current_path(state.load_path[self.parent_property]) @@ -556,19 +594,22 @@ def _emit_lazyload(self, strategy_options, session, state, ident_key, passive): # reverse props that are MANYTOONE are loading *this* # object from get(), so don't need to eager out to those. if rev.direction is interfaces.MANYTOONE and \ - rev._use_get and \ - not isinstance(rev.strategy, LazyLoader): - q = q.options(strategy_options.Load(rev.parent).lazyload(rev.key)) + rev._use_get and \ + not isinstance(rev.strategy, LazyLoader): + q = q.options( + strategy_options.Load(rev.parent).lazyload(rev.key)) - lazy_clause = self.lazy_clause(state, passive=passive) + lazy_clause, params = self._generate_lazy_clause( + state, passive=passive) if pending: - bind_values = sql_util.bind_values(lazy_clause) - if None in bind_values: + if util.has_intersection( + orm_util._none_set, params.values()): return None + elif util.has_intersection(orm_util._never_set, params.values()): + return None - q = q.filter(lazy_clause) - + q = q.filter(lazy_clause).params(params) result = q.all() if self.uselist: @@ -586,8 +627,9 @@ def _emit_lazyload(self, strategy_options, session, state, ident_key, passive): else: return None - def create_row_processor(self, context, path, loadopt, - mapper, row, adapter): + def create_row_processor( + self, context, path, loadopt, + mapper, result, adapter, populators): key = self.key if not self.is_class_level: # we are not the primary manager for this attribute @@ -598,12 +640,12 @@ def create_row_processor(self, context, path, loadopt, # "lazyload" option on a "no load" # attribute - "eager" attributes always have a # class-level lazyloader installed. 
- set_lazy_callable = InstanceState._row_processor( - mapper.class_manager, - LoadLazyAttribute(key), key) + set_lazy_callable = InstanceState._instance_level_callable_processor( + mapper.class_manager, + LoadLazyAttribute(key, self._strategy_keys[0]), key) - return set_lazy_callable, None, None - else: + populators["new"].append((self.key, set_lazy_callable)) + elif context.populate_existing or mapper.always_refresh: def reset_for_lazy_callable(state, dict_, row): # we are the primary manager for this attribute on # this class - reset its @@ -615,78 +657,89 @@ def reset_for_lazy_callable(state, dict_, row): # any existing state. state._reset(dict_, key) - return reset_for_lazy_callable, None, None - + populators["new"].append((self.key, reset_for_lazy_callable)) class LoadLazyAttribute(object): """serializable loader object used by LazyLoader""" - def __init__(self, key): + def __init__(self, key, strategy_key=(('lazy', 'select'),)): self.key = key + self.strategy_key = strategy_key def __call__(self, state, passive=attributes.PASSIVE_OFF): key = self.key instance_mapper = state.manager.mapper prop = instance_mapper._props[key] - strategy = prop._strategies[LazyLoader] + strategy = prop._strategies[self.strategy_key] return strategy._load_for_state(state, passive) @properties.RelationshipProperty.strategy_for(lazy="immediate") class ImmediateLoader(AbstractRelationshipLoader): + __slots__ = () + def init_class_attribute(self, mapper): self.parent_property.\ - _get_strategy_by_cls(LazyLoader).\ - init_class_attribute(mapper) + _get_strategy_by_cls(LazyLoader).\ + init_class_attribute(mapper) - def setup_query(self, context, entity, - path, loadopt, adapter, column_collection=None, - parentmapper=None, **kwargs): + def setup_query( + self, context, entity, + path, loadopt, adapter, column_collection=None, + parentmapper=None, **kwargs): pass - def create_row_processor(self, context, path, loadopt, - mapper, row, adapter): + def create_row_processor( + self, context, path, loadopt, + mapper, result, adapter, populators): def load_immediate(state, dict_, row): state.get_impl(self.key).get(state, dict_) - return None, None, load_immediate + populators["delayed"].append((self.key, load_immediate)) @log.class_logger @properties.RelationshipProperty.strategy_for(lazy="subquery") class SubqueryLoader(AbstractRelationshipLoader): + __slots__ = 'join_depth', + def __init__(self, parent): super(SubqueryLoader, self).__init__(parent) self.join_depth = self.parent_property.join_depth def init_class_attribute(self, mapper): self.parent_property.\ - _get_strategy_by_cls(LazyLoader).\ - init_class_attribute(mapper) + _get_strategy_by_cls(LazyLoader).\ + init_class_attribute(mapper) - def setup_query(self, context, entity, - path, loadopt, adapter, - column_collection=None, - parentmapper=None, **kwargs): + def setup_query( + self, context, entity, + path, loadopt, adapter, + column_collection=None, + parentmapper=None, **kwargs): if not context.query._enable_eagerloads: return + elif context.query._yield_per: + context.query._no_yield_per("subquery") path = path[self.parent_property] # build up a path indicating the path from the leftmost # entity to the thing we're subquery loading. 
- with_poly_info = path.get(context.attributes, - "path_with_polymorphic", None) + with_poly_info = path.get( + context.attributes, + "path_with_polymorphic", None) if with_poly_info is not None: effective_entity = with_poly_info.entity else: effective_entity = self.mapper - subq_path = context.attributes.get(('subquery_path', None), - orm_util.PathRegistry.root) + subq_path = context.attributes.get( + ('subquery_path', None), + orm_util.PathRegistry.root) subq_path = subq_path + path @@ -699,19 +752,19 @@ def setup_query(self, context, entity, elif subq_path.contains_mapper(self.mapper): return - subq_mapper, leftmost_mapper, leftmost_attr, leftmost_relationship = \ - self._get_leftmost(subq_path) + leftmost_mapper, leftmost_attr, leftmost_relationship = \ + self._get_leftmost(subq_path) orig_query = context.attributes.get( - ("orig_query", SubqueryLoader), - context.query) + ("orig_query", SubqueryLoader), + context.query) # generate a new Query from the original, then # produce a subquery from it. left_alias = self._generate_from_original_query( - orig_query, leftmost_mapper, - leftmost_attr, leftmost_relationship, - entity.mapper + orig_query, leftmost_mapper, + leftmost_attr, leftmost_relationship, + entity.entity_zero ) # generate another Query that will join the @@ -724,15 +777,15 @@ def setup_query(self, context, entity, ("orig_query", SubqueryLoader): orig_query, ('subquery_path', None): subq_path } - q = q._enable_single_crit(False) + q = q._set_enable_single_crit(False) to_join, local_attr, parent_alias = \ - self._prep_for_joins(left_alias, subq_path) + self._prep_for_joins(left_alias, subq_path) q = q.order_by(*local_attr) q = q.add_columns(*local_attr) - - q = self._apply_joins(q, to_join, left_alias, - parent_alias, effective_entity) + q = self._apply_joins( + q, to_join, left_alias, + parent_alias, effective_entity) q = self._setup_options(q, subq_path, orig_query, effective_entity) q = self._setup_outermost_orderby(q) @@ -746,26 +799,30 @@ def _get_leftmost(self, subq_path): subq_mapper = orm_util._class_to_mapper(subq_path[0]) # determine attributes of the leftmost mapper - if self.parent.isa(subq_mapper) and self.parent_property is subq_path[1]: + if self.parent.isa(subq_mapper) and \ + self.parent_property is subq_path[1]: leftmost_mapper, leftmost_prop = \ - self.parent, self.parent_property + self.parent, self.parent_property else: leftmost_mapper, leftmost_prop = \ - subq_mapper, \ - subq_path[1] + subq_mapper, \ + subq_path[1] leftmost_cols = leftmost_prop.local_columns leftmost_attr = [ - leftmost_mapper._columntoproperty[c].class_attribute + getattr( + subq_path[0].entity, + leftmost_mapper._columntoproperty[c].key) for c in leftmost_cols ] - return subq_mapper, leftmost_mapper, leftmost_attr, leftmost_prop - def _generate_from_original_query(self, - orig_query, leftmost_mapper, - leftmost_attr, leftmost_relationship, - entity_mapper + return leftmost_mapper, leftmost_attr, leftmost_prop + + def _generate_from_original_query( + self, + orig_query, leftmost_mapper, + leftmost_attr, leftmost_relationship, orig_entity ): # reformat the original query # to look only for significant columns @@ -773,9 +830,8 @@ def _generate_from_original_query(self, # set a real "from" if not present, as this is more # accurate than just going off of the column expression - if not q._from_obj and entity_mapper.isa(leftmost_mapper): - q._set_select_from([entity_mapper], False) - + if not q._from_obj and orig_entity.mapper.isa(leftmost_mapper): + q._set_select_from([orig_entity], False) 
target_cols = q._adapt_col_list(leftmost_attr) # select from the identity columns of the outer @@ -804,8 +860,9 @@ def _generate_from_original_query(self, # which we'll join onto. embed_q = q.with_labels().subquery() - left_alias = orm_util.AliasedClass(leftmost_mapper, embed_q, - use_mapper_path=True) + left_alias = orm_util.AliasedClass( + leftmost_mapper, embed_q, + use_mapper_path=True) return left_alias def _prep_for_joins(self, left_alias, subq_path): @@ -844,13 +901,15 @@ def _prep_for_joins(self, left_alias, subq_path): # in the vast majority of cases, and [ticket:2014] # illustrates a case where sub_path[-2] is a subclass # of self.parent - parent_alias = orm_util.AliasedClass(to_join[-1][0], - use_mapper_path=True) + parent_alias = orm_util.AliasedClass( + to_join[-1][0], + use_mapper_path=True) else: # if of_type() were used leading to this relationship, # self.parent is more specific than subq_path[-2] - parent_alias = orm_util.AliasedClass(self.parent, - use_mapper_path=True) + parent_alias = orm_util.AliasedClass( + self.parent, + use_mapper_path=True) local_cols = self.parent_property.local_columns @@ -860,8 +919,9 @@ def _prep_for_joins(self, left_alias, subq_path): ] return to_join, local_attr, parent_alias - def _apply_joins(self, q, to_join, left_alias, parent_alias, - effective_entity): + def _apply_joins( + self, q, to_join, left_alias, parent_alias, + effective_entity): for i, (mapper, key) in enumerate(to_join): # we need to use query.join() as opposed to @@ -883,9 +943,9 @@ def _apply_joins(self, q, to_join, left_alias, parent_alias, else: if last and effective_entity is not self.mapper: attr = getattr(parent_alias, key).\ - of_type(effective_entity) + of_type(effective_entity) else: - attr = key + attr = getattr(mapper.entity, key) if second_to_last: q = q.join(parent_alias, attr, from_joinpoint=True) @@ -912,12 +972,12 @@ def _setup_outermost_orderby(self, q): # right now. eagerjoin = q._from_obj[0] eager_order_by = \ - eagerjoin._target_adapter.\ - copy_and_process( - util.to_list( - self.parent_property.order_by - ) - ) + eagerjoin._target_adapter.\ + copy_and_process( + util.to_list( + self.parent_property.order_by + ) + ) q = q.order_by(*eager_order_by) return q @@ -950,20 +1010,27 @@ def loader(self, state, dict_, row): if self._data is None: self._load() - def create_row_processor(self, context, path, loadopt, - mapper, row, adapter): + def create_row_processor( + self, context, path, loadopt, + mapper, result, adapter, populators): if not self.parent.class_manager[self.key].impl.supports_population: raise sa_exc.InvalidRequestError( - "'%s' does not support object " - "population - eager loading cannot be applied." % - self) + "'%s' does not support object " + "population - eager loading cannot be applied." % + self) path = path[self.parent_property] subq = path.get(context.attributes, 'subquery') if subq is None: - return None, None, None + return + + assert subq.session is context.session, ( + "Subquery session doesn't refer to that of " + "our context. Are there broken context caching " + "schemes being used?" 
+ ) local_cols = self.parent_property.local_columns @@ -979,22 +1046,28 @@ def create_row_processor(self, context, path, loadopt, local_cols = [adapter.columns[c] for c in local_cols] if self.uselist: - return self._create_collection_loader(collections, local_cols) + self._create_collection_loader( + context, collections, local_cols, populators) else: - return self._create_scalar_loader(collections, local_cols) + self._create_scalar_loader( + context, collections, local_cols, populators) - def _create_collection_loader(self, collections, local_cols): + def _create_collection_loader( + self, context, collections, local_cols, populators): def load_collection_from_subq(state, dict_, row): collection = collections.get( tuple([row[col] for col in local_cols]), () ) state.get_impl(self.key).\ - set_committed_value(state, dict_, collection) + set_committed_value(state, dict_, collection) - return load_collection_from_subq, None, None, collections.loader + populators["new"].append((self.key, load_collection_from_subq)) + if context.invoke_all_eagers: + populators["eager"].append((self.key, collections.loader)) - def _create_scalar_loader(self, collections, local_cols): + def _create_scalar_loader( + self, context, collections, local_cols, populators): def load_scalar_from_subq(state, dict_, row): collection = collections.get( tuple([row[col] for col in local_cols]), @@ -1008,10 +1081,11 @@ def load_scalar_from_subq(state, dict_, row): scalar = collection[0] state.get_impl(self.key).\ - set_committed_value(state, dict_, scalar) - - return load_scalar_from_subq, None, None, collections.loader + set_committed_value(state, dict_, scalar) + populators["new"].append((self.key, load_scalar_from_subq)) + if context.invoke_all_eagers: + populators["eager"].append((self.key, collections.loader)) @log.class_logger @@ -1022,6 +1096,9 @@ class JoinedLoader(AbstractRelationshipLoader): using joined eager loading. 
""" + + __slots__ = 'join_depth', + def __init__(self, parent): super(JoinedLoader, self).__init__(parent) self.join_depth = self.parent_property.join_depth @@ -1030,20 +1107,24 @@ def init_class_attribute(self, mapper): self.parent_property.\ _get_strategy_by_cls(LazyLoader).init_class_attribute(mapper) - def setup_query(self, context, entity, path, loadopt, adapter, \ - column_collection=None, parentmapper=None, - **kwargs): - """Add a left outer join to the statement thats being constructed.""" + def setup_query( + self, context, entity, path, loadopt, adapter, + column_collection=None, parentmapper=None, + chained_from_outerjoin=False, + **kwargs): + """Add a left outer join to the statement that's being constructed.""" if not context.query._enable_eagerloads: return + elif context.query._yield_per and self.uselist: + context.query._no_yield_per("joined collection") path = path[self.parent_property] with_polymorphic = None user_defined_adapter = self._init_user_defined_eager_proc( - loadopt, context) if loadopt else False + loadopt, context) if loadopt else False if user_defined_adapter is not False: clauses, adapter, add_to_collection = \ @@ -1061,9 +1142,10 @@ def setup_query(self, context, entity, path, loadopt, adapter, \ elif path.contains_mapper(self.mapper): return - clauses, adapter, add_to_collection = self._generate_row_adapter( + clauses, adapter, add_to_collection, chained_from_outerjoin = \ + self._generate_row_adapter( context, entity, path, loadopt, adapter, - column_collection, parentmapper + column_collection, parentmapper, chained_from_outerjoin ) with_poly_info = path.get( @@ -1078,23 +1160,20 @@ def setup_query(self, context, entity, path, loadopt, adapter, \ path = path[self.mapper] - for value in self.mapper._iterate_polymorphic_properties( - mappers=with_polymorphic): - value.setup( - context, - entity, - path, - clauses, - parentmapper=self.mapper, - column_collection=add_to_collection) + loading._setup_entity_query( + context, self.mapper, entity, + path, clauses, add_to_collection, + with_polymorphic=with_polymorphic, + parentmapper=self.mapper, + chained_from_outerjoin=chained_from_outerjoin) if with_poly_info is not None and \ - None in set(context.secondary_columns): + None in set(context.secondary_columns): raise sa_exc.InvalidRequestError( - "Detected unaliased columns when generating joined " - "load. Make sure to use aliased=True or flat=True " - "when using joined loading with with_polymorphic()." - ) + "Detected unaliased columns when generating joined " + "load. Make sure to use aliased=True or flat=True " + "when using joined loading with with_polymorphic()." + ) def _init_user_defined_eager_proc(self, loadopt, context): @@ -1107,8 +1186,9 @@ def _init_user_defined_eager_proc(self, loadopt, context): # the option applies. check if the "user_defined_eager_row_processor" # has been built up. 
- adapter = path.get(context.attributes, - "user_defined_eager_row_processor", False) + adapter = path.get( + context.attributes, + "user_defined_eager_row_processor", False) if adapter is not False: # just return it return adapter @@ -1126,25 +1206,30 @@ def _init_user_defined_eager_proc(self, loadopt, context): if alias is not None: if isinstance(alias, str): alias = prop.target.alias(alias) - adapter = sql_util.ColumnAdapter(alias, - equivalents=prop.mapper._equivalent_columns) + adapter = sql_util.ColumnAdapter( + alias, + equivalents=prop.mapper._equivalent_columns) else: if path.contains(context.attributes, "path_with_polymorphic"): - with_poly_info = path.get(context.attributes, - "path_with_polymorphic") + with_poly_info = path.get( + context.attributes, + "path_with_polymorphic") adapter = orm_util.ORMAdapter( - with_poly_info.entity, - equivalents=prop.mapper._equivalent_columns) + with_poly_info.entity, + equivalents=prop.mapper._equivalent_columns) else: - adapter = context.query._polymorphic_adapters.get(prop.mapper, None) - path.set(context.attributes, - "user_defined_eager_row_processor", - adapter) + adapter = context.query._polymorphic_adapters.get( + prop.mapper, None) + path.set( + context.attributes, + "user_defined_eager_row_processor", + adapter) return adapter - def _setup_query_on_user_defined_adapter(self, context, entity, - path, adapter, user_defined_adapter): + def _setup_query_on_user_defined_adapter( + self, context, entity, + path, adapter, user_defined_adapter): # apply some more wrapping to the "user defined adapter" # if we are setting up the query for SQL render. @@ -1152,20 +1237,22 @@ def _setup_query_on_user_defined_adapter(self, context, entity, if adapter and user_defined_adapter: user_defined_adapter = user_defined_adapter.wrap(adapter) - path.set(context.attributes, "user_defined_eager_row_processor", - user_defined_adapter) + path.set( + context.attributes, "user_defined_eager_row_processor", + user_defined_adapter) elif adapter: user_defined_adapter = adapter - path.set(context.attributes, "user_defined_eager_row_processor", - user_defined_adapter) + path.set( + context.attributes, "user_defined_eager_row_processor", + user_defined_adapter) add_to_collection = context.primary_columns return user_defined_adapter, adapter, add_to_collection - def _generate_row_adapter(self, - context, entity, path, loadopt, adapter, - column_collection, parentmapper - ): + def _generate_row_adapter( + self, + context, entity, path, loadopt, adapter, + column_collection, parentmapper, chained_from_outerjoin): with_poly_info = path.get( context.attributes, "path_with_polymorphic", @@ -1174,39 +1261,49 @@ def _generate_row_adapter(self, if with_poly_info: to_adapt = with_poly_info.entity else: - to_adapt = orm_util.AliasedClass(self.mapper, - flat=True, - use_mapper_path=True) + to_adapt = orm_util.AliasedClass( + self.mapper, + flat=True, + use_mapper_path=True) clauses = orm_util.ORMAdapter( - to_adapt, - equivalents=self.mapper._equivalent_columns, - adapt_required=True) + to_adapt, + equivalents=self.mapper._equivalent_columns, + adapt_required=True, allow_label_resolve=False, + anonymize_labels=True) assert clauses.aliased_class is not None - if self.parent_property.direction != interfaces.MANYTOONE: + if self.parent_property.uselist: context.multi_row_eager_loaders = True innerjoin = ( - loadopt.local_opts.get( - 'innerjoin', self.parent_property.innerjoin) - if loadopt is not None - else self.parent_property.innerjoin - ) + loadopt.local_opts.get( + 
'innerjoin', self.parent_property.innerjoin) + if loadopt is not None + else self.parent_property.innerjoin + ) + + if not innerjoin: + # if this is an outer join, all non-nested eager joins from + # this path must also be outer joins + chained_from_outerjoin = True context.create_eager_joins.append( - (self._create_eager_join, context, - entity, path, adapter, - parentmapper, clauses, innerjoin) + ( + self._create_eager_join, context, + entity, path, adapter, + parentmapper, clauses, innerjoin, chained_from_outerjoin + ) ) add_to_collection = context.secondary_columns path.set(context.attributes, "eager_row_processor", clauses) - return clauses, adapter, add_to_collection + return clauses, adapter, add_to_collection, chained_from_outerjoin - def _create_eager_join(self, context, entity, - path, adapter, parentmapper, - clauses, innerjoin): + def _create_eager_join( + self, context, entity, + path, adapter, parentmapper, + clauses, innerjoin, chained_from_outerjoin): if parentmapper is None: localparent = entity.mapper @@ -1223,10 +1320,9 @@ def _create_eager_join(self, context, entity, if entity not in context.eager_joins and \ not should_nest_selectable and \ - context.from_clause: - index, clause = \ - sql_util.find_join_source( - context.from_clause, entity.selectable) + context.from_clause: + index, clause = sql_util.find_join_source( + context.from_clause, entity.selectable) if clause is not None: # join to an existing FROM clause on the query. # key it to its list index in the eager_joins dict. @@ -1241,57 +1337,60 @@ def _create_eager_join(self, context, entity, if adapter: if getattr(adapter, 'aliased_class', None): + # joining from an adapted entity. The adapted entity + # might be a "with_polymorphic", so resolve that to our + # specific mapper's entity before looking for our attribute + # name on it. + efm = inspect(adapter.aliased_class).\ + _entity_for_mapper( + parentmapper + if parentmapper.isa(self.parent) else self.parent) + + # look for our attribute on the adapted entity, else fall back + # to our straight property onclause = getattr( - adapter.aliased_class, self.key, - self.parent_property) + efm.entity, self.key, + self.parent_property) else: onclause = getattr( - orm_util.AliasedClass( - self.parent, - adapter.selectable, - use_mapper_path=True - ), - self.key, self.parent_property - ) + orm_util.AliasedClass( + self.parent, + adapter.selectable, + use_mapper_path=True + ), + self.key, self.parent_property + ) else: onclause = self.parent_property assert clauses.aliased_class is not None - join_to_outer = innerjoin and isinstance(towrap, sql.Join) and towrap.isouter - - if join_to_outer and innerjoin == 'nested': - inner = orm_util.join( - towrap.right, - clauses.aliased_class, - onclause, - isouter=False - ) - - eagerjoin = orm_util.join( - towrap.left, - inner, - towrap.onclause, - isouter=True - ) - eagerjoin._target_adapter = inner._target_adapter + attach_on_outside = ( + not chained_from_outerjoin or + not innerjoin or innerjoin == 'unnested') + + if attach_on_outside: + # this is the "classic" eager join case. 
+ eagerjoin = orm_util._ORMJoin( + towrap, + clauses.aliased_class, + onclause, + isouter=not innerjoin or ( + chained_from_outerjoin and isinstance(towrap, sql.Join) + ), _left_memo=self.parent, _right_memo=self.mapper + ) else: - if join_to_outer: - innerjoin = False - eagerjoin = orm_util.join( - towrap, - clauses.aliased_class, - onclause, - isouter=not innerjoin - ) + # all other cases are innerjoin=='nested' approach + eagerjoin = self._splice_nested_inner_join( + path, towrap, clauses, onclause) + context.eager_joins[entity_key] = eagerjoin # send a hint to the Query as to where it may "splice" this join eagerjoin.stop_on = entity.selectable - if self.parent_property.secondary is None and \ - not parentmapper: + if not parentmapper: # for parentclause that is the non-eager end of the join, # ensure all the parent cols in the primaryjoin are actually # in the @@ -1300,24 +1399,83 @@ def _create_eager_join(self, context, entity, # This has the effect # of "undefering" those columns. for col in sql_util._find_columns( - self.parent_property.primaryjoin): + self.parent_property.primaryjoin): if localparent.mapped_table.c.contains_column(col): if adapter: col = adapter.columns[col] context.primary_columns.append(col) if self.parent_property.order_by: - context.eager_order_by += \ - eagerjoin._target_adapter.\ - copy_and_process( - util.to_list( - self.parent_property.order_by - ) - ) - - def _create_eager_adapter(self, context, row, adapter, path, loadopt): + context.eager_order_by += eagerjoin._target_adapter.\ + copy_and_process( + util.to_list( + self.parent_property.order_by + ) + ) + + def _splice_nested_inner_join( + self, path, join_obj, clauses, onclause, splicing=False): + + if splicing is False: + # first call is always handed a join object + # from the outside + assert isinstance(join_obj, orm_util._ORMJoin) + elif isinstance(join_obj, sql.selectable.FromGrouping): + return self._splice_nested_inner_join( + path, join_obj.element, clauses, onclause, splicing + ) + elif not isinstance(join_obj, orm_util._ORMJoin): + if path[-2] is splicing: + return orm_util._ORMJoin( + join_obj, clauses.aliased_class, + onclause, isouter=False, + _left_memo=splicing, + _right_memo=path[-1].mapper + ) + else: + # only here if splicing == True + return None + + target_join = self._splice_nested_inner_join( + path, join_obj.right, clauses, + onclause, join_obj._right_memo) + if target_join is None: + right_splice = False + target_join = self._splice_nested_inner_join( + path, join_obj.left, clauses, + onclause, join_obj._left_memo) + if target_join is None: + # should only return None when recursively called, + # e.g. splicing==True + assert splicing is not False, \ + "assertion failed attempting to produce joined eager loads" + return None + else: + right_splice = True + + if right_splice: + # for a right splice, attempt to flatten out + # a JOIN b JOIN c JOIN .. 
to avoid needless + # parenthesis nesting + if not join_obj.isouter and not target_join.isouter: + eagerjoin = join_obj._splice_into_center(target_join) + else: + eagerjoin = orm_util._ORMJoin( + join_obj.left, target_join, + join_obj.onclause, isouter=join_obj.isouter, + _left_memo=join_obj._left_memo) + else: + eagerjoin = orm_util._ORMJoin( + target_join, join_obj.right, + join_obj.onclause, isouter=join_obj.isouter, + _right_memo=join_obj._right_memo) + + eagerjoin._target_adapter = target_join._target_adapter + return eagerjoin + + def _create_eager_adapter(self, context, result, adapter, path, loadopt): user_defined_adapter = self._init_user_defined_eager_proc( - loadopt, context) if loadopt else False + loadopt, context) if loadopt else False if user_defined_adapter is not False: decorator = user_defined_adapter @@ -1333,56 +1491,61 @@ def _create_eager_adapter(self, context, row, adapter, path, loadopt): if decorator is None: return False - try: - self.mapper.identity_key_from_row(row, decorator) + if self.mapper._result_has_identity_key(result, decorator): return decorator - except KeyError: - # no identity key - dont return a row + else: + # no identity key - don't return a row # processor, will cause a degrade to lazy return False - def create_row_processor(self, context, path, loadopt, mapper, row, adapter): + def create_row_processor( + self, context, path, loadopt, mapper, + result, adapter, populators): if not self.parent.class_manager[self.key].impl.supports_population: raise sa_exc.InvalidRequestError( - "'%s' does not support object " - "population - eager loading cannot be applied." % - self) + "'%s' does not support object " + "population - eager loading cannot be applied." % + self + ) our_path = path[self.parent_property] eager_adapter = self._create_eager_adapter( - context, - row, - adapter, our_path, loadopt) + context, + result, + adapter, our_path, loadopt) if eager_adapter is not False: key = self.key - _instance = loading.instance_processor( - self.mapper, - context, - our_path[self.mapper], - eager_adapter) + _instance = loading._instance_processor( + self.mapper, + context, + result, + our_path[self.mapper], + eager_adapter) if not self.uselist: - return self._create_scalar_loader(context, key, _instance) + self._create_scalar_loader(context, key, _instance, populators) else: - return self._create_collection_loader(context, key, _instance) + self._create_collection_loader( + context, key, _instance, populators) else: - return self.parent_property.\ - _get_strategy_by_cls(LazyLoader).\ - create_row_processor( - context, path, loadopt, - mapper, row, adapter) + self.parent_property._get_strategy_by_cls(LazyLoader).\ + create_row_processor( + context, path, loadopt, + mapper, result, adapter, populators) - def _create_collection_loader(self, context, key, _instance): + def _create_collection_loader(self, context, key, _instance, populators): def load_collection_from_joined_new_row(state, dict_, row): collection = attributes.init_state_collection( - state, dict_, key) + state, dict_, key) result_list = util.UniqueAppender(collection, 'append_without_event') context.attributes[(state, key)] = result_list - _instance(row, result_list) + inst = _instance(row) + if inst is not None: + result_list.append(inst) def load_collection_from_joined_existing_row(state, dict_, row): if (state, key) in context.attributes: @@ -1392,46 +1555,52 @@ def load_collection_from_joined_existing_row(state, dict_, row): # with isnew=False when self-referential eager loading # is used; the 
same instance may be present in two # distinct sets of result columns - collection = attributes.init_state_collection(state, - dict_, key) + collection = attributes.init_state_collection( + state, dict_, key) result_list = util.UniqueAppender( - collection, - 'append_without_event') + collection, + 'append_without_event') context.attributes[(state, key)] = result_list - _instance(row, result_list) + inst = _instance(row) + if inst is not None: + result_list.append(inst) def load_collection_from_joined_exec(state, dict_, row): - _instance(row, None) + _instance(row) - return load_collection_from_joined_new_row, \ - load_collection_from_joined_existing_row, \ - None, load_collection_from_joined_exec + populators["new"].append((self.key, load_collection_from_joined_new_row)) + populators["existing"].append( + (self.key, load_collection_from_joined_existing_row)) + if context.invoke_all_eagers: + populators["eager"].append( + (self.key, load_collection_from_joined_exec)) - def _create_scalar_loader(self, context, key, _instance): + def _create_scalar_loader(self, context, key, _instance, populators): def load_scalar_from_joined_new_row(state, dict_, row): # set a scalar object instance directly on the parent # object, bypassing InstrumentedAttribute event handlers. - dict_[key] = _instance(row, None) + dict_[key] = _instance(row) def load_scalar_from_joined_existing_row(state, dict_, row): # call _instance on the row, even though the object has # been created, so that we further descend into properties - existing = _instance(row, None) + existing = _instance(row) if existing is not None \ and key in dict_ \ - and existing is not dict_[key]: + and existing is not dict_[key]: util.warn( "Multiple rows returned with " "uselist=False for eagerly-loaded attribute '%s' " % self) def load_scalar_from_joined_exec(state, dict_, row): - _instance(row, None) - - return load_scalar_from_joined_new_row, \ - load_scalar_from_joined_existing_row, \ - None, load_scalar_from_joined_exec + _instance(row) + populators["new"].append((self.key, load_scalar_from_joined_new_row)) + populators["existing"].append( + (self.key, load_scalar_from_joined_existing_row)) + if context.invoke_all_eagers: + populators["eager"].append((self.key, load_scalar_from_joined_exec)) def single_parent_validator(desc, prop): @@ -1453,7 +1622,9 @@ def append(state, value, initiator): def set_(state, value, oldvalue, initiator): return _do_check(state, value, oldvalue, initiator) - event.listen(desc, 'append', append, raw=True, retval=True, - active_history=True) - event.listen(desc, 'set', set_, raw=True, retval=True, - active_history=True) + event.listen( + desc, 'append', append, raw=True, retval=True, + active_history=True) + event.listen( + desc, 'set', set_, raw=True, retval=True, + active_history=True) diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index 317fc0813c..be792aa435 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -1,5 +1,5 @@ -# orm/strategy_options.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -15,17 +15,20 @@ from .base import _is_aliased_class, _class_to_mapper from . 
import util as orm_util from .path_registry import PathRegistry, TokenRegistry, \ - _WILDCARD_TOKEN, _DEFAULT_TOKEN + _WILDCARD_TOKEN, _DEFAULT_TOKEN + class Load(Generative, MapperOption): """Represents loader options which modify the state of a - :class:`.Query` in order to affect how various mapped attributes are loaded. + :class:`.Query` in order to affect how various mapped attributes are + loaded. .. versionadded:: 0.9.0 The :meth:`.Load` system is a new foundation for the existing system of loader options, including options such as - :func:`.orm.joinedload`, :func:`.orm.defer`, and others. In particular, - it introduces a new method-chained system that replaces the need for - dot-separated paths as well as "_all()" options such as :func:`.orm.joinedload_all`. + :func:`.orm.joinedload`, :func:`.orm.defer`, and others. In + particular, it introduces a new method-chained system that replaces the + need for dot-separated paths as well as "_all()" options such as + :func:`.orm.joinedload_all`. A :class:`.Load` object can be used directly or indirectly. To use one directly, instantiate given the parent class. This style of usage is @@ -40,11 +43,12 @@ class Load(Generative, MapperOption): session.query(MyClass).options(myopt) The :class:`.Load` construct is invoked indirectly whenever one makes use - of the various loader options that are present in ``sqlalchemy.orm``, including - options such as :func:`.orm.joinedload`, :func:`.orm.defer`, :func:`.orm.subqueryload`, - and all the rest. These constructs produce an "anonymous" form of the - :class:`.Load` object which tracks attributes and options, but is not linked - to a parent class until it is associated with a parent :class:`.Query`:: + of the various loader options that are present in ``sqlalchemy.orm``, + including options such as :func:`.orm.joinedload`, :func:`.orm.defer`, + :func:`.orm.subqueryload`, and all the rest. These constructs produce an + "anonymous" form of the :class:`.Load` object which tracks attributes and + options, but is not linked to a parent class until it is associated with a + parent :class:`.Query`:: # produce "unbound" Load object myopt = joinedload("widgets") @@ -54,11 +58,12 @@ class Load(Generative, MapperOption): session.query(MyClass).options(myopt) Whether the direct or indirect style is used, the :class:`.Load` object - returned now represents a specific "path" along the entities of a :class:`.Query`. - This path can be traversed using a standard method-chaining approach. - Supposing a class hierarchy such as ``User``, ``User.addresses -> Address``, - ``User.orders -> Order`` and ``Order.items -> Item``, we can specify a variety - of loader options along each element in the "path":: + returned now represents a specific "path" along the entities of a + :class:`.Query`. This path can be traversed using a standard + method-chaining approach. Supposing a class hierarchy such as ``User``, + ``User.addresses -> Address``, ``User.orders -> Order`` and + ``Order.items -> Item``, we can specify a variety of loader options along + each element in the "path":: session.query(User).options( joinedload("addresses"), @@ -66,11 +71,12 @@ class Load(Generative, MapperOption): ) Where above, the ``addresses`` collection will be joined-loaded, the - ``orders`` collection will be subquery-loaded, and within that subquery load - the ``items`` collection will be joined-loaded. + ``orders`` collection will be subquery-loaded, and within that subquery + load the ``items`` collection will be joined-loaded. 
""" + def __init__(self, entity): insp = inspect(entity) self.path = insp._path_registry @@ -82,6 +88,7 @@ def _generate(self): cloned.local_opts = {} return cloned + _merge_into_path = False strategy = None propagate_to_loaders = False @@ -105,7 +112,7 @@ def _generate_path(self, path, attr, wildcard_key, raiseerr=True): if raiseerr and not path.has_entity: if isinstance(path, TokenRegistry): raise sa_exc.ArgumentError( - "Wildcard token cannot be followed by another entity") + "Wildcard token cannot be followed by another entity") else: raise sa_exc.ArgumentError( "Attribute '%s' of entity '%s' does not " @@ -144,8 +151,9 @@ def _generate_path(self, path, attr, wildcard_key, raiseerr=True): if not prop.parent.common_parent(path.mapper): if raiseerr: - raise sa_exc.ArgumentError("Attribute '%s' does not " - "link from element '%s'" % (attr, path.entity)) + raise sa_exc.ArgumentError( + "Attribute '%s' does not " + "link from element '%s'" % (attr, path.entity)) else: return None @@ -154,13 +162,16 @@ def _generate_path(self, path, attr, wildcard_key, raiseerr=True): ext_info = inspect(ac) path_element = ext_info.mapper + existing = path.entity_path[prop].get( + self.context, "path_with_polymorphic") if not ext_info.is_aliased_class: ac = orm_util.with_polymorphic( - ext_info.mapper.base_mapper, - ext_info.mapper, aliased=True, - _use_mapper_path=True) - path.entity_path[prop].set(self.context, - "path_with_polymorphic", inspect(ac)) + ext_info.mapper.base_mapper, + ext_info.mapper, aliased=True, + _use_mapper_path=True, + _existing_alias=existing) + path.entity_path[prop].set( + self.context, "path_with_polymorphic", inspect(ac)) path = path[prop][path_element] else: path = path[prop] @@ -169,13 +180,17 @@ def _generate_path(self, path, attr, wildcard_key, raiseerr=True): path = path.entity_path return path + def __str__(self): + return "Load(strategy=%r)" % (self.strategy, ) + def _coerce_strat(self, strategy): if strategy is not None: strategy = tuple(sorted(strategy.items())) return strategy @_generative - def set_relationship_strategy(self, attr, strategy, propagate_to_loaders=True): + def set_relationship_strategy( + self, attr, strategy, propagate_to_loaders=True): strategy = self._coerce_strat(strategy) self.propagate_to_loaders = propagate_to_loaders @@ -200,7 +215,15 @@ def set_column_strategy(self, attrs, strategy, opts=None): cloned._set_path_strategy() def _set_path_strategy(self): - if self.path.has_entity: + if self._merge_into_path: + # special helper for undefer_group + existing = self.path.get(self.context, "loader") + if existing: + existing.local_opts.update(self.local_opts) + else: + self.path.set(self.context, "loader", self) + + elif self.path.has_entity: self.path.parent.set(self.context, "loader", self) else: self.path.set(self.context, "loader", self) @@ -224,14 +247,15 @@ def _chop_path(self, to_chop, path): if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN): return to_chop - elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_token.key: + elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and \ + c_token != p_token.key: return None if c_token is p_token: continue else: return None - return to_chop[i+1:] + return to_chop[i + 1:] class _UnboundLoad(Load): @@ -244,6 +268,7 @@ class _UnboundLoad(Load): of freestanding options, e.g. ``joinedload('x.y.z')``. 
""" + def __init__(self): self.path = () self._to_bind = set() @@ -317,14 +342,15 @@ def _split_key(key): return opt - def _chop_path(self, to_chop, path): i = -1 - for i, (c_token, (p_mapper, p_prop)) in enumerate(zip(to_chop, path.pairs())): + for i, (c_token, (p_mapper, p_prop)) in enumerate( + zip(to_chop, path.pairs())): if isinstance(c_token, util.string_types): if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN): return to_chop - elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_prop.key: + elif c_token != 'relationship:%s' % ( + _WILDCARD_TOKEN,) and c_token != p_prop.key: return None elif isinstance(c_token, PropComparator): if c_token.property is not p_prop: @@ -334,7 +360,6 @@ def _chop_path(self, to_chop, path): return to_chop[i:] - def _bind_loader(self, query, context, raiseerr): start_path = self.path # _current_path implies we're in a @@ -348,20 +373,21 @@ def _bind_loader(self, query, context, raiseerr): return None token = start_path[0] + if isinstance(token, util.string_types): entity = self._find_entity_basestring(query, token, raiseerr) elif isinstance(token, PropComparator): prop = token.property entity = self._find_entity_prop_comparator( - query, - prop.key, - token._parententity, - raiseerr) + query, + prop.key, + token._parententity, + raiseerr) else: raise sa_exc.ArgumentError( - "mapper option expects " - "string key or list of attributes") + "mapper option expects " + "string key or list of attributes") if not entity: return @@ -377,7 +403,7 @@ def _bind_loader(self, query, context, raiseerr): path = loader.path for token in start_path: loader.path = path = loader._generate_path( - loader.path, token, None, raiseerr) + loader.path, token, None, raiseerr) if path is None: return @@ -389,12 +415,29 @@ def _bind_loader(self, query, context, raiseerr): effective_path = loader.path # prioritize "first class" options over those - # that were "links in the chain", e.g. "x" and "y" in someload("x.y.z") - # versus someload("x") / someload("x.y") - if self._is_chain_link: - effective_path.setdefault(context, "loader", loader) + # that were "links in the chain", e.g. "x" and "y" in + # someload("x.y.z") versus someload("x") / someload("x.y") + + if effective_path.is_token: + for path in effective_path.generate_for_superclasses(): + if self._merge_into_path: + # special helper for undefer_group + existing = path.get(context, "loader") + if existing: + existing.local_opts.update(self.local_opts) + else: + path.set(context, "loader", loader) + elif self._is_chain_link: + path.setdefault(context, "loader", loader) + else: + path.set(context, "loader", loader) else: - effective_path.set(context, "loader", loader) + # only supported for the undefer_group() wildcard opt + assert not self._merge_into_path + if self._is_chain_link: + effective_path.setdefault(context, "loader", loader) + else: + effective_path.set(context, "loader", loader) def _find_entity_prop_comparator(self, query, token, mapper, raiseerr): if _is_aliased_class(mapper): @@ -410,7 +453,7 @@ def _find_entity_prop_comparator(self, query, token, mapper, raiseerr): raise sa_exc.ArgumentError( "Query has only expression-based entities - " "can't find property named '%s'." - % (token, ) + % (token, ) ) else: raise sa_exc.ArgumentError( @@ -418,7 +461,7 @@ def _find_entity_prop_comparator(self, query, token, mapper, raiseerr): "specified in this Query. Note the full path " "from root (%s) to target entity must be specified." 
% (token, ",".join(str(x) for - x in query._mapper_entities)) + x in query._mapper_entities)) ) else: return None @@ -428,9 +471,9 @@ def _find_entity_basestring(self, query, token, raiseerr): if len(list(query._mapper_entities)) != 1: if raiseerr: raise sa_exc.ArgumentError( - "Wildcard loader can only be used with exactly " - "one entity. Use Load(ent) to specify " - "specific entities.") + "Wildcard loader can only be used with exactly " + "one entity. Use Load(ent) to specify " + "specific entities.") elif token.endswith(_DEFAULT_TOKEN): raiseerr = False @@ -444,13 +487,12 @@ def _find_entity_basestring(self, query, token, raiseerr): raise sa_exc.ArgumentError( "Query has only expression-based entities - " "can't find property named '%s'." - % (token, ) + % (token, ) ) else: return None - class loader_option(object): def __init__(self): pass @@ -492,6 +534,7 @@ def _add_unbound_all_fn(self, fn): """ % {"name": self.name} return self + @loader_option() def contains_eager(loadopt, attr, alias=None): """Indicate that the given attribute should be eagerly loaded from @@ -532,16 +575,19 @@ def contains_eager(loadopt, attr, alias=None): alias = info.selectable cloned = loadopt.set_relationship_strategy( - attr, - {"lazy": "joined"}, - propagate_to_loaders=False - ) + attr, + {"lazy": "joined"}, + propagate_to_loaders=False + ) cloned.local_opts['eager_from_alias'] = alias return cloned + @contains_eager._add_unbound_fn def contains_eager(*keys, **kw): - return _UnboundLoad()._from_keys(_UnboundLoad.contains_eager, keys, True, kw) + return _UnboundLoad()._from_keys( + _UnboundLoad.contains_eager, keys, True, kw) + @loader_option() def load_only(loadopt, *attrs): @@ -558,11 +604,11 @@ def load_only(loadopt, *attrs): session.query(User).options(load_only("name", "fullname")) Example - given a relationship ``User.addresses -> Address``, specify - subquery loading for the ``User.addresses`` collection, but on each ``Address`` - object load only the ``email_address`` attribute:: + subquery loading for the ``User.addresses`` collection, but on each + ``Address`` object load only the ``email_address`` attribute:: session.query(User).options( - subqueryload("addreses").load_only("email_address") + subqueryload("addresses").load_only("email_address") ) For a :class:`.Query` that has multiple entities, the lead entity can be @@ -578,17 +624,20 @@ def load_only(loadopt, *attrs): """ cloned = loadopt.set_column_strategy( - attrs, - {"deferred": False, "instrument": True} - ) + attrs, + {"deferred": False, "instrument": True} + ) cloned.set_column_strategy("*", - {"deferred": True, "instrument": True}) + {"deferred": True, "instrument": True}, + {"undefer_pks": True}) return cloned + @load_only._add_unbound_fn def load_only(*attrs): return _UnboundLoad().load_only(*attrs) + @loader_option() def joinedload(loadopt, attr, innerjoin=None): """Indicate that the given attribute should be loaded using joined @@ -614,24 +663,59 @@ def joinedload(loadopt, attr, innerjoin=None): query(Order).options(joinedload(Order.user, innerjoin=True)) - If the joined-eager load is chained onto an existing LEFT OUTER JOIN, - ``innerjoin=True`` will be bypassed and the join will continue to - chain as LEFT OUTER JOIN so that the results don't change. As an alternative, - specify the value ``"nested"``. This will instead nest the join - on the right side, e.g. using the form "a LEFT OUTER JOIN (b JOIN c)". 
+ In order to chain multiple eager joins together where some may be + OUTER and others INNER, right-nested joins are used to link them:: + + query(A).options( + joinedload(A.bs, innerjoin=False). + joinedload(B.cs, innerjoin=True) + ) + + The above query, linking A.bs via "outer" join and B.cs via "inner" join + would render the joins as "a LEFT OUTER JOIN (b JOIN c)". When using + SQLite, this form of JOIN is translated to use full subqueries as this + syntax is otherwise not directly supported. + + The ``innerjoin`` flag can also be stated with the term ``"unnested"``. + This will prevent joins from being right-nested, and will instead + link an "innerjoin" eagerload to an "outerjoin" eagerload by bypassing + the "inner" join. Using this form as follows:: + + query(A).options( + joinedload(A.bs, innerjoin=False). + joinedload(B.cs, innerjoin="unnested") + ) + + Joins will be rendered as "a LEFT OUTER JOIN b LEFT OUTER JOIN c", so that + all of "a" is matched rather than being incorrectly limited by a "b" that + does not contain a "c". + + .. note:: The "unnested" flag does **not** affect the JOIN rendered + from a many-to-many association table, e.g. a table configured + as :paramref:`.relationship.secondary`, to the target table; for + correctness of results, these joins are always INNER and are + therefore right-nested if linked to an OUTER join. - .. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to support - nesting of eager "inner" joins. + .. versionadded:: 0.9.4 Added support for "nesting" of eager "inner" + joins. See :ref:`feature_2976`. + + .. versionchanged:: 1.0.0 ``innerjoin=True`` now implies + ``innerjoin="nested"``, whereas in 0.9 it implied + ``innerjoin="unnested"``. In order to achieve the pre-1.0 "unnested" + inner join behavior, use the value ``innerjoin="unnested"``. + See :ref:`migration_3008`. .. note:: - The joins produced by :func:`.orm.joinedload` are **anonymously aliased**. - The criteria by which the join proceeds cannot be modified, nor can the - :class:`.Query` refer to these joins in any way, including ordering. + The joins produced by :func:`.orm.joinedload` are **anonymously + aliased**. The criteria by which the join proceeds cannot be + modified, nor can the :class:`.Query` refer to these joins in any way, + including ordering. To produce a specific SQL JOIN which is explicitly available, use :meth:`.Query.join`. To combine explicit JOINs with eager loading - of collections, use :func:`.orm.contains_eager`; see :ref:`contains_eager`. + of collections, use :func:`.orm.contains_eager`; see + :ref:`contains_eager`. .. seealso:: @@ -645,8 +729,8 @@ def joinedload(loadopt, attr, innerjoin=None): :paramref:`.relationship.lazy` - :paramref:`.relationship.innerjoin` - :func:`.relationship`-level version - of the :paramref:`.joinedload.innerjoin` option. + :paramref:`.relationship.innerjoin` - :func:`.relationship`-level + version of the :paramref:`.joinedload.innerjoin` option. 
""" loader = loadopt.set_relationship_strategy(attr, {"lazy": "joined"}) @@ -654,15 +738,17 @@ def joinedload(loadopt, attr, innerjoin=None): loader.local_opts['innerjoin'] = innerjoin return loader + @joinedload._add_unbound_fn def joinedload(*keys, **kw): return _UnboundLoad._from_keys( - _UnboundLoad.joinedload, keys, False, kw) + _UnboundLoad.joinedload, keys, False, kw) + @joinedload._add_unbound_all_fn def joinedload_all(*keys, **kw): return _UnboundLoad._from_keys( - _UnboundLoad.joinedload, keys, True, kw) + _UnboundLoad.joinedload, keys, True, kw) @loader_option() @@ -699,14 +785,17 @@ def subqueryload(loadopt, attr): """ return loadopt.set_relationship_strategy(attr, {"lazy": "subquery"}) + @subqueryload._add_unbound_fn def subqueryload(*keys): return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, False, {}) + @subqueryload._add_unbound_all_fn def subqueryload_all(*keys): return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, True, {}) + @loader_option() def lazyload(loadopt, attr): """Indicate that the given attribute should be loaded using "lazy" @@ -722,14 +811,17 @@ def lazyload(loadopt, attr): """ return loadopt.set_relationship_strategy(attr, {"lazy": "select"}) + @lazyload._add_unbound_fn def lazyload(*keys): return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, False, {}) + @lazyload._add_unbound_all_fn def lazyload_all(*keys): return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, True, {}) + @loader_option() def immediateload(loadopt, attr): """Indicate that the given attribute should be loaded using @@ -752,9 +844,11 @@ def immediateload(loadopt, attr): loader = loadopt.set_relationship_strategy(attr, {"lazy": "immediate"}) return loader + @immediateload._add_unbound_fn def immediateload(*keys): - return _UnboundLoad._from_keys(_UnboundLoad.immediateload, keys, False, {}) + return _UnboundLoad._from_keys( + _UnboundLoad.immediateload, keys, False, {}) @loader_option() @@ -771,10 +865,12 @@ def noload(loadopt, attr): return loadopt.set_relationship_strategy(attr, {"lazy": "noload"}) + @noload._add_unbound_fn def noload(*keys): return _UnboundLoad._from_keys(_UnboundLoad.noload, keys, False, {}) + @loader_option() def defaultload(loadopt, attr): """Indicate an attribute should load using its default loader style. @@ -795,14 +891,16 @@ def defaultload(loadopt, attr): """ return loadopt.set_relationship_strategy( - attr, - None - ) + attr, + None + ) + @defaultload._add_unbound_fn def defaultload(*keys): return _UnboundLoad._from_keys(_UnboundLoad.defaultload, keys, False, {}) + @loader_option() def defer(loadopt, key): """Indicate that the given column-oriented attribute should be deferred, e.g. @@ -856,19 +954,21 @@ def defer(loadopt, key): """ return loadopt.set_column_strategy( - (key, ), - {"deferred": True, "instrument": True} - ) + (key, ), + {"deferred": True, "instrument": True} + ) @defer._add_unbound_fn def defer(key, *addl_attrs): - return _UnboundLoad._from_keys(_UnboundLoad.defer, (key, ) + addl_attrs, False, {}) + return _UnboundLoad._from_keys( + _UnboundLoad.defer, (key, ) + addl_attrs, False, {}) + @loader_option() def undefer(loadopt, key): - """Indicate that the given column-oriented attribute should be undeferred, e.g. - specified within the SELECT statement of the entity as a whole. + """Indicate that the given column-oriented attribute should be undeferred, + e.g. specified within the SELECT statement of the entity as a whole. 
The column being undeferred is typically set up on the mapping as a :func:`.deferred` attribute. @@ -882,7 +982,8 @@ def undefer(loadopt, key): session.query(MyClass).options(undefer("col1"), undefer("col2")) # undefer all columns specific to a single class using Load + * - session.query(MyClass, MyOtherClass).options(Load(MyClass).undefer("*")) + session.query(MyClass, MyOtherClass).options( + Load(MyClass).undefer("*")) :param key: Attribute to be undeferred. @@ -900,17 +1001,21 @@ def undefer(loadopt, key): """ return loadopt.set_column_strategy( - (key, ), - {"deferred": False, "instrument": True} - ) + (key, ), + {"deferred": False, "instrument": True} + ) + @undefer._add_unbound_fn def undefer(key, *addl_attrs): - return _UnboundLoad._from_keys(_UnboundLoad.undefer, (key, ) + addl_attrs, False, {}) + return _UnboundLoad._from_keys( + _UnboundLoad.undefer, (key, ) + addl_attrs, False, {}) + @loader_option() def undefer_group(loadopt, name): - """Indicate that columns within the given deferred group name should be undeferred. + """Indicate that columns within the given deferred group name should be + undeferred. The columns being undeferred are set up on the mapping as :func:`.deferred` attributes and include a "group" name. @@ -920,9 +1025,11 @@ def undefer_group(loadopt, name): session.query(MyClass).options(undefer_group("large_attrs")) To undefer a group of attributes on a related entity, the path can be - spelled out using relationship loader options, such as :func:`.orm.defaultload`:: + spelled out using relationship loader options, such as + :func:`.orm.defaultload`:: - session.query(MyClass).options(defaultload("someattr").undefer_group("large_attrs")) + session.query(MyClass).options( + defaultload("someattr").undefer_group("large_attrs")) .. versionchanged:: 0.9.0 :func:`.orm.undefer_group` is now specific to a particiular entity load path. @@ -936,13 +1043,14 @@ def undefer_group(loadopt, name): :func:`.orm.undefer` """ + loadopt._merge_into_path = True return loadopt.set_column_strategy( - "*", - None, - {"undefer_group": name} - ) + "*", + None, + {"undefer_group_%s" % name: True} + ) + @undefer_group._add_unbound_fn def undefer_group(name): return _UnboundLoad().undefer_group(name) - diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py index cf735fc53e..ccca50871a 100644 --- a/lib/sqlalchemy/orm/sync.py +++ b/lib/sqlalchemy/orm/sync.py @@ -1,5 +1,6 @@ # orm/sync.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -13,7 +14,7 @@ def populate(source, source_mapper, dest, dest_mapper, - synchronize_pairs, uowcommit, flag_cascaded_pks): + synchronize_pairs, uowcommit, flag_cascaded_pks): source_dict = source.dict dest_dict = dest.dict @@ -22,7 +23,7 @@ def populate(source, source_mapper, dest, dest_mapper, # inline of source_mapper._get_state_attr_by_column prop = source_mapper._columntoproperty[l] value = source.manager[prop.key].impl.get(source, source_dict, - attributes.PASSIVE_OFF) + attributes.PASSIVE_OFF) except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, dest_mapper, r) @@ -39,14 +40,34 @@ def populate(source, source_mapper, dest, dest_mapper, # reasons, since we only need this info for a primary key # destination. 
if flag_cascaded_pks and l.primary_key and \ - r.primary_key and \ - r.references(l): + r.primary_key and \ + r.references(l): uowcommit.attributes[("pk_cascaded", dest, r)] = True +def bulk_populate_inherit_keys( + source_dict, source_mapper, synchronize_pairs): + # a simplified version of populate() used by bulk insert mode + for l, r in synchronize_pairs: + try: + prop = source_mapper._columntoproperty[l] + value = source_dict[prop.key] + except exc.UnmappedColumnError: + _raise_col_to_prop(False, source_mapper, l, source_mapper, r) + + try: + prop = source_mapper._columntoproperty[r] + source_dict[prop.key] = value + except exc.UnmappedColumnError: + _raise_col_to_prop(True, source_mapper, l, source_mapper, r) + + def clear(dest, dest_mapper, synchronize_pairs): for l, r in synchronize_pairs: - if r.primary_key: + if r.primary_key and \ + dest_mapper._get_state_attr_by_column( + dest, dest.dict, r) not in orm_util._none_set: + raise AssertionError( "Dependency rule tried to blank-out primary key " "column '%s' on instance '%s'" % @@ -64,7 +85,7 @@ def update(source, source_mapper, dest, old_prefix, synchronize_pairs): oldvalue = source_mapper._get_committed_attr_by_column( source.obj(), l) value = source_mapper._get_state_attr_by_column( - source, source.dict, l) + source, source.dict, l, passive=attributes.PASSIVE_OFF) except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, None, r) dest[r.key] = value @@ -75,7 +96,7 @@ def populate_dict(source, source_mapper, dict_, synchronize_pairs): for l, r in synchronize_pairs: try: value = source_mapper._get_state_attr_by_column( - source, source.dict, l) + source, source.dict, l, passive=attributes.PASSIVE_OFF) except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, None, r) @@ -92,8 +113,8 @@ def source_modified(uowcommit, source, source_mapper, synchronize_pairs): prop = source_mapper._columntoproperty[l] except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, None, r) - history = uowcommit.get_attribute_history(source, prop.key, - attributes.PASSIVE_NO_INITIALIZE) + history = uowcommit.get_attribute_history( + source, prop.key, attributes.PASSIVE_NO_INITIALIZE) if bool(history.deleted): return True else: @@ -103,16 +124,17 @@ def source_modified(uowcommit, source, source_mapper, synchronize_pairs): def _raise_col_to_prop(isdest, source_mapper, source_column, dest_mapper, dest_column): if isdest: - raise exc.UnmappedColumnError("Can't execute sync rule for " - "destination column '%s'; mapper '%s' does not map " - "this column. Try using an explicit `foreign_keys` " - "collection which does not include this column (or use " - "a viewonly=True relation)." % (dest_column, - dest_mapper)) + raise exc.UnmappedColumnError( + "Can't execute sync rule for " + "destination column '%s'; mapper '%s' does not map " + "this column. Try using an explicit `foreign_keys` " + "collection which does not include this column (or use " + "a viewonly=True relation)." % (dest_column, dest_mapper)) else: - raise exc.UnmappedColumnError("Can't execute sync rule for " - "source column '%s'; mapper '%s' does not map this " - "column. Try using an explicit `foreign_keys` " - "collection which does not include destination column " - "'%s' (or use a viewonly=True relation)." - % (source_column, source_mapper, dest_column)) + raise exc.UnmappedColumnError( + "Can't execute sync rule for " + "source column '%s'; mapper '%s' does not map this " + "column. 
Try using an explicit `foreign_keys` " + "collection which does not include destination column " + "'%s' (or use a viewonly=True relation)." % + (source_column, source_mapper, dest_column)) diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py index 2964705a2e..8b4ae64bb0 100644 --- a/lib/sqlalchemy/orm/unitofwork.py +++ b/lib/sqlalchemy/orm/unitofwork.py @@ -1,5 +1,6 @@ # orm/unitofwork.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -15,6 +16,7 @@ from .. import util, event from ..util import topological from . import attributes, persistence, util as orm_util +import itertools def track_cascade_events(descriptor, prop): @@ -55,16 +57,16 @@ def remove(state, item, initiator): if sess._warn_on_events: sess._flush_warning( - "collection remove" - if prop.uselist - else "related attribute delete") + "collection remove" + if prop.uselist + else "related attribute delete") # expunge pending orphans item_state = attributes.instance_state(item) if prop._cascade.delete_orphan and \ item_state in sess._new and \ prop.mapper._is_orphan(item_state): - sess.expunge(item) + sess.expunge(item) def set_(state, newvalue, oldvalue, initiator): # process "save_update" cascade rules for when an instance @@ -82,18 +84,19 @@ def set_(state, newvalue, oldvalue, initiator): if newvalue is not None: newvalue_state = attributes.instance_state(newvalue) if prop._cascade.save_update and \ - (prop.cascade_backrefs or key == initiator.key) and \ - not sess._contains_state(newvalue_state): + (prop.cascade_backrefs or key == initiator.key) and \ + not sess._contains_state(newvalue_state): sess._save_or_update_state(newvalue_state) if oldvalue is not None and \ + oldvalue is not attributes.NEVER_SET and \ oldvalue is not attributes.PASSIVE_NO_RESULT and \ - prop._cascade.delete_orphan: + prop._cascade.delete_orphan: # possible to reach here with attributes.NEVER_SET ? 
oldvalue_state = attributes.instance_state(oldvalue) if oldvalue_state in sess._new and \ - prop.mapper._is_orphan(oldvalue_state): + prop.mapper._is_orphan(oldvalue_state): sess.expunge(oldvalue) return newvalue @@ -173,7 +176,7 @@ def remove_state_actions(self, state): self.states[state] = (isdelete, True) def get_attribute_history(self, state, key, - passive=attributes.PASSIVE_NO_INITIALIZE): + passive=attributes.PASSIVE_NO_INITIALIZE): """facade to attributes.get_state_history(), including caching of results.""" @@ -189,11 +192,11 @@ def get_attribute_history(self, state, key, # we want non-passive, do a non-passive lookup and re-cache if not cached_passive & attributes.SQL_OK \ - and passive & attributes.SQL_OK: + and passive & attributes.SQL_OK: impl = state.manager[key].impl history = impl.get_history(state, state.dict, - attributes.PASSIVE_OFF | - attributes.LOAD_AGAINST_COMMITTED) + attributes.PASSIVE_OFF | + attributes.LOAD_AGAINST_COMMITTED) if history and impl.uses_objects: state_history = history.as_state() else: @@ -204,13 +207,13 @@ def get_attribute_history(self, state, key, # TODO: store the history as (state, object) tuples # so we don't have to keep converting here history = impl.get_history(state, state.dict, passive | - attributes.LOAD_AGAINST_COMMITTED) + attributes.LOAD_AGAINST_COMMITTED) if history and impl.uses_objects: state_history = history.as_state() else: state_history = history self.attributes[hashkey] = (history, state_history, - passive) + passive) return state_history @@ -223,13 +226,13 @@ def register_preprocessor(self, processor, fromparent): self.presort_actions[key] = Preprocess(processor, fromparent) def register_object(self, state, isdelete=False, - listonly=False, cancel_delete=False, - operation=None, prop=None): + listonly=False, cancel_delete=False, + operation=None, prop=None): if not self.session._contains_state(state): if not state.deleted and operation is not None: util.warn("Object of type %s not in session, %s operation " - "along '%s' will not proceed" % - (orm_util.state_class_str(state), operation, prop)) + "along '%s' will not proceed" % + (orm_util.state_class_str(state), operation, prop)) return False if state not in self.states: @@ -276,8 +279,8 @@ def _mapper_for_dep(self): """ return util.PopulateDict( - lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop - ) + lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop + ) def filter_states_for_dep(self, dep, states): """Filter the given list of InstanceStates to those relevant to the @@ -312,8 +315,8 @@ def _generate_actions(self): # see if the graph of mapper dependencies has cycles. self.cycles = cycles = topological.find_cycles( - self.dependencies, - list(self.postsort_actions.values())) + self.dependencies, + list(self.postsort_actions.values())) if cycles: # if yes, break the per-mapper actions into @@ -328,8 +331,8 @@ def _generate_actions(self): # that were broken up. 
for edge in list(self.dependencies): if None in edge or \ - edge[0].disabled or edge[1].disabled or \ - cycles.issuperset(edge): + edge[0].disabled or edge[1].disabled or \ + cycles.issuperset(edge): self.dependencies.remove(edge) elif edge[0] in cycles: self.dependencies.remove(edge) @@ -343,30 +346,30 @@ def _generate_actions(self): return set([a for a in self.postsort_actions.values() if not a.disabled ] - ).difference(cycles) + ).difference(cycles) def execute(self): postsort_actions = self._generate_actions() - #sort = topological.sort(self.dependencies, postsort_actions) - #print "--------------" - #print "\ndependencies:", self.dependencies - #print "\ncycles:", self.cycles - #print "\nsort:", list(sort) - #print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions) + # sort = topological.sort(self.dependencies, postsort_actions) + # print "--------------" + # print "\ndependencies:", self.dependencies + # print "\ncycles:", self.cycles + # print "\nsort:", list(sort) + # print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions) # execute if self.cycles: for set_ in topological.sort_as_subsets( - self.dependencies, - postsort_actions): + self.dependencies, + postsort_actions): while set_: n = set_.pop() n.execute_aggregate(self, set_) else: for rec in topological.sort( - self.dependencies, - postsort_actions): + self.dependencies, + postsort_actions): rec.execute(self) def finalize_flush_changes(self): @@ -377,14 +380,19 @@ def finalize_flush_changes(self): execute() method has succeeded and the transaction has been committed. """ + if not self.states: + return + states = set(self.states) isdel = set( s for (s, (isdelete, listonly)) in self.states.items() if isdelete ) other = states.difference(isdel) - self.session._remove_newly_deleted(isdel) - self.session._register_newly_persistent(other) + if isdel: + self.session._remove_newly_deleted(isdel) + if other: + self.session._register_newly_persistent(other) class IterateMappersMixin(object): @@ -428,11 +436,11 @@ def execute(self, uow): if (delete_states or save_states): if not self.setup_flush_actions and ( - self.dependency_processor.\ - prop_has_changes(uow, delete_states, True) or - self.dependency_processor.\ - prop_has_changes(uow, save_states, False) - ): + self.dependency_processor. + prop_has_changes(uow, delete_states, True) or + self.dependency_processor. 
+ prop_has_changes(uow, save_states, False) + ): self.dependency_processor.per_property_flush_actions(uow) self.setup_flush_actions = True return True @@ -449,8 +457,8 @@ def __new__(cls, uow, *args): return uow.postsort_actions[key] else: uow.postsort_actions[key] = \ - ret = \ - object.__new__(cls) + ret = \ + object.__new__(cls) return ret def execute_aggregate(self, uow, recs): @@ -469,7 +477,7 @@ def __init__(self, uow, dependency_processor, delete, fromparent): self.delete = delete self.fromparent = fromparent uow.deps[dependency_processor.parent.base_mapper].\ - add(dependency_processor) + add(dependency_processor) def execute(self, uow): states = self._elements(uow) @@ -519,13 +527,14 @@ def __init__(self, uow, mapper): def execute(self, uow): persistence.save_obj(self.mapper, - uow.states_for_mapper_hierarchy(self.mapper, False, False), - uow - ) + uow.states_for_mapper_hierarchy( + self.mapper, False, False), + uow + ) def per_state_flush_actions(self, uow): states = list(uow.states_for_mapper_hierarchy( - self.mapper, False, False)) + self.mapper, False, False)) base_mapper = self.mapper.base_mapper delete_all = DeleteAll(uow, base_mapper) for state in states: @@ -547,13 +556,14 @@ def __init__(self, uow, mapper): def execute(self, uow): persistence.delete_obj(self.mapper, - uow.states_for_mapper_hierarchy(self.mapper, True, False), - uow - ) + uow.states_for_mapper_hierarchy( + self.mapper, True, False), + uow + ) def per_state_flush_actions(self, uow): states = list(uow.states_for_mapper_hierarchy( - self.mapper, True, False)) + self.mapper, True, False)) base_mapper = self.mapper.base_mapper save_all = SaveUpdateAll(uow, base_mapper) for state in states: @@ -579,9 +589,9 @@ def execute_aggregate(self, uow, recs): dependency_processor = self.dependency_processor delete = self.delete our_recs = [r for r in recs - if r.__class__ is cls_ and - r.dependency_processor is dependency_processor and - r.delete is delete] + if r.__class__ is cls_ and + r.dependency_processor is dependency_processor and + r.delete is delete] recs.difference_update(our_recs) states = [self.state] + [r.state for r in our_recs] if delete: @@ -607,13 +617,13 @@ def execute_aggregate(self, uow, recs): cls_ = self.__class__ mapper = self.mapper our_recs = [r for r in recs - if r.__class__ is cls_ and - r.mapper is mapper] + if r.__class__ is cls_ and + r.mapper is mapper] recs.difference_update(our_recs) persistence.save_obj(mapper, - [self.state] + - [r.state for r in our_recs], - uow) + [self.state] + + [r.state for r in our_recs], + uow) def __repr__(self): return "%s(%s)" % ( @@ -631,13 +641,13 @@ def execute_aggregate(self, uow, recs): cls_ = self.__class__ mapper = self.mapper our_recs = [r for r in recs - if r.__class__ is cls_ and - r.mapper is mapper] + if r.__class__ is cls_ and + r.mapper is mapper] recs.difference_update(our_recs) states = [self.state] + [r.state for r in our_recs] persistence.delete_obj(mapper, - [s for s in states if uow.states[s][0]], - uow) + [s for s in states if uow.states[s][0]], + uow) def __repr__(self): return "%s(%s)" % ( diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index dd85f2ef1d..42fadcaa6c 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -1,5 +1,6 @@ # orm/util.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: 
http://www.opensource.org/licenses/mit-license.php @@ -12,9 +13,9 @@ import re from .base import instance_str, state_str, state_class_str, attribute_str, \ - state_attribute_str, object_mapper, object_state, _none_set + state_attribute_str, object_mapper, object_state, _none_set, _never_set from .base import class_mapper, _class_to_mapper -from .base import _InspectionAttr +from .base import InspectionAttr from .path_registry import PathRegistry all_cascades = frozenset(("delete", "delete-orphan", "all", "merge", @@ -26,24 +27,22 @@ class CascadeOptions(frozenset): """Keeps track of the options sent to relationship().cascade""" _add_w_all_cascades = all_cascades.difference([ - 'all', 'none', 'delete-orphan']) + 'all', 'none', 'delete-orphan']) _allowed_cascades = all_cascades - def __new__(cls, arg): - values = set([ - c for c - in re.split('\s*,\s*', arg or "") - if c - ]) + __slots__ = ( + 'save_update', 'delete', 'refresh_expire', 'merge', + 'expunge', 'delete_orphan') + def __new__(cls, value_list): + if isinstance(value_list, util.string_types) or value_list is None: + return cls.from_string(value_list) + values = set(value_list) if values.difference(cls._allowed_cascades): raise sa_exc.ArgumentError( - "Invalid cascade option(s): %s" % - ", ".join([repr(x) for x in - sorted( - values.difference(cls._allowed_cascades) - )]) - ) + "Invalid cascade option(s): %s" % + ", ".join([repr(x) for x in + sorted(values.difference(cls._allowed_cascades))])) if "all" in values: values.update(cls._add_w_all_cascades) @@ -61,7 +60,7 @@ def __new__(cls, arg): if self.delete_orphan and not self.delete: util.warn("The 'delete-orphan' cascade " - "option requires 'delete'.") + "option requires 'delete'.") return self def __repr__(self): @@ -69,9 +68,21 @@ def __repr__(self): ",".join([x for x in sorted(self)]) ) + @classmethod + def from_string(cls, arg): + values = [ + c for c + in re.split('\s*,\s*', arg or "") + if c + ] + return cls(values) + -def _validator_events(desc, key, validator, include_removes, include_backrefs): - """Runs a validation method on an attribute value to be set or appended.""" +def _validator_events( + desc, key, validator, include_removes, include_backrefs): + """Runs a validation method on an attribute value to be set or + appended. + """ if not include_backrefs: def detect_is_backref(state, initiator): @@ -115,7 +126,7 @@ def set_(state, value, oldvalue, initiator): def polymorphic_union(table_map, typecolname, - aliasname='p_union', cast_nulls=True): + aliasname='p_union', cast_nulls=True): """Create a ``UNION`` statement used by a polymorphic mapper. See :ref:`concrete_inheritance` for an example of how @@ -141,7 +152,7 @@ def polymorphic_union(table_map, typecolname, for key in table_map: table = table_map[key] - # mysql doesnt like selecting from a select; + # mysql doesn't like selecting from a select; # make it an alias of the select if isinstance(table, sql.Select): table = table.alias() @@ -167,10 +178,11 @@ def col(name, table): for type, table in table_map.items(): if typecolname is not None: result.append( - sql.select([col(name, table) for name in colnames] + - [sql.literal_column(sql_util._quote_ddl_expr(type)). - label(typecolname)], - from_obj=[table])) + sql.select([col(name, table) for name in colnames] + + [sql.literal_column( + sql_util._quote_ddl_expr(type)). 
+ label(typecolname)], + from_obj=[table])) else: result.append(sql.select([col(name, table) for name in colnames], from_obj=[table])) @@ -224,7 +236,8 @@ def identity_key(*args, **kwargs): E.g.:: - >>> row = engine.execute("select * from table where a=1 and b=2").first() + >>> row = engine.execute("select * from table where a=1 and b=2").\ +first() >>> identity_key(MyClass, row=row) (, (1, 2)) @@ -245,11 +258,12 @@ def identity_key(*args, **kwargs): elif len(args) == 3: class_, ident = args else: - raise sa_exc.ArgumentError("expected up to three " - "positional arguments, got %s" % len(args)) + raise sa_exc.ArgumentError( + "expected up to three positional arguments, " + "got %s" % len(args)) if kwargs: raise sa_exc.ArgumentError("unknown keyword arguments: %s" - % ", ".join(kwargs)) + % ", ".join(kwargs)) mapper = class_mapper(class_) if "ident" in locals(): return mapper.identity_key_from_primary_key(util.to_list(ident)) @@ -257,20 +271,20 @@ def identity_key(*args, **kwargs): instance = kwargs.pop("instance") if kwargs: raise sa_exc.ArgumentError("unknown keyword arguments: %s" - % ", ".join(kwargs.keys)) + % ", ".join(kwargs.keys)) mapper = object_mapper(instance) return mapper.identity_key_from_instance(instance) class ORMAdapter(sql_util.ColumnAdapter): - """Extends ColumnAdapter to accept ORM entities. - - The selectable is extracted from the given entity, - and the AliasedClass if any is referenced. + """ColumnAdapter subclass which excludes adaptation of entities from + non-matching mappers. """ + def __init__(self, entity, equivalents=None, adapt_required=False, - chain_to=None): + chain_to=None, allow_label_resolve=True, + anonymize_labels=False): info = inspection.inspect(entity) self.mapper = info.mapper @@ -280,16 +294,19 @@ def __init__(self, entity, equivalents=None, adapt_required=False, self.aliased_class = entity else: self.aliased_class = None - sql_util.ColumnAdapter.__init__(self, selectable, - equivalents, chain_to, - adapt_required=adapt_required) - def replace(self, elem): + sql_util.ColumnAdapter.__init__( + self, selectable, equivalents, chain_to, + adapt_required=adapt_required, + allow_label_resolve=allow_label_resolve, + anonymize_labels=anonymize_labels, + include_fn=self._include_fn + ) + + def _include_fn(self, elem): entity = elem._annotations.get('parentmapper', None) - if not entity or entity.isa(self.mapper): - return sql_util.ColumnAdapter.replace(self, elem) - else: - return None + return not entity or entity.isa(self.mapper) + class AliasedClass(object): """Represents an "aliased" form of a mapped class for usage with Query. @@ -331,30 +348,32 @@ class AliasedClass(object): argument descriptions. """ + def __init__(self, cls, alias=None, - name=None, - flat=False, - adapt_on_names=False, - # TODO: None for default here? - with_polymorphic_mappers=(), - with_polymorphic_discriminator=None, - base_alias=None, - use_mapper_path=False): + name=None, + flat=False, + adapt_on_names=False, + # TODO: None for default here? 
+ with_polymorphic_mappers=(), + with_polymorphic_discriminator=None, + base_alias=None, + use_mapper_path=False): mapper = _class_to_mapper(cls) if alias is None: alias = mapper._with_polymorphic_selectable.alias( - name=name, flat=flat) + name=name, flat=flat) + self._aliased_insp = AliasedInsp( self, mapper, alias, name, with_polymorphic_mappers - if with_polymorphic_mappers - else mapper.with_polymorphic_mappers, + if with_polymorphic_mappers + else mapper.with_polymorphic_mappers, with_polymorphic_discriminator - if with_polymorphic_discriminator is not None - else mapper.polymorphic_on, + if with_polymorphic_discriminator is not None + else mapper.polymorphic_on, base_alias, use_mapper_path, adapt_on_names @@ -402,7 +421,7 @@ def __repr__(self): id(self), self._aliased_insp._target.__name__) -class AliasedInsp(_InspectionAttr): +class AliasedInsp(InspectionAttr): """Provide an inspection interface for an :class:`.AliasedClass` object. @@ -439,8 +458,8 @@ class AliasedInsp(_InspectionAttr): """ def __init__(self, entity, mapper, selectable, name, - with_polymorphic_mappers, polymorphic_on, - _base_alias, _use_mapper_path, adapt_on_names): + with_polymorphic_mappers, polymorphic_on, + _base_alias, _use_mapper_path, adapt_on_names): self.entity = entity self.mapper = mapper self.selectable = selectable @@ -450,9 +469,9 @@ def __init__(self, entity, mapper, selectable, name, self._base_alias = _base_alias or self self._use_mapper_path = _use_mapper_path - self._adapter = sql_util.ClauseAdapter(selectable, - equivalents=mapper._equivalent_columns, - adapt_on_names=adapt_on_names) + self._adapter = sql_util.ColumnAdapter( + selectable, equivalents=mapper._equivalent_columns, + adapt_on_names=adapt_on_names, anonymize_labels=True) self._adapt_on_names = adapt_on_names self._target = mapper.class_ @@ -460,9 +479,9 @@ def __init__(self, entity, mapper, selectable, name, for poly in self.with_polymorphic_mappers: if poly is not mapper: setattr(self.entity, poly.class_.__name__, - AliasedClass(poly.class_, selectable, base_alias=self, - adapt_on_names=adapt_on_names, - use_mapper_path=_use_mapper_path)) + AliasedClass(poly.class_, selectable, base_alias=self, + adapt_on_names=adapt_on_names, + use_mapper_path=_use_mapper_path)) is_aliased_class = True "always returns True" @@ -510,23 +529,33 @@ def __setstate__(self, state): def _adapt_element(self, elem): return self._adapter.traverse(elem).\ - _annotate({ - 'parententity': self.entity, - 'parentmapper': self.mapper} - ) + _annotate({ + 'parententity': self, + 'parentmapper': self.mapper} + ) def _entity_for_mapper(self, mapper): self_poly = self.with_polymorphic_mappers if mapper in self_poly: - return getattr(self.entity, mapper.class_.__name__)._aliased_insp + if mapper is self.mapper: + return self + else: + return getattr( + self.entity, mapper.class_.__name__)._aliased_insp elif mapper.isa(self.mapper): return self else: - assert False, "mapper %s doesn't correspond to %s" % (mapper, self) + assert False, "mapper %s doesn't correspond to %s" % ( + mapper, self) def __repr__(self): - return '' % ( - id(self), self.class_.__name__) + if self.with_polymorphic_mappers: + with_poly = "(%s)" % ", ".join( + mp.class_.__name__ for mp in self.with_polymorphic_mappers) + else: + with_poly = "" + return '' % ( + id(self), self.class_.__name__, with_poly) inspection._inspects(AliasedClass)(lambda target: target._aliased_insp) @@ -573,11 +602,12 @@ def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False): attribute name that will 
be accessible via tuples returned by a :class:`.Query` object. - :param flat: Boolean, will be passed through to the :meth:`.FromClause.alias` - call so that aliases of :class:`.Join` objects don't include an enclosing - SELECT. This can lead to more efficient queries in many circumstances. - A JOIN against a nested JOIN will be rewritten as a JOIN against an aliased - SELECT subquery on backends that don't support this syntax. + :param flat: Boolean, will be passed through to the + :meth:`.FromClause.alias` call so that aliases of :class:`.Join` objects + don't include an enclosing SELECT. This can lead to more efficient + queries in many circumstances. A JOIN against a nested JOIN will be + rewritten as a JOIN against an aliased SELECT subquery on backends that + don't support this syntax. .. versionadded:: 0.9.0 @@ -623,13 +653,14 @@ class UnitPrice(Base): return element.alias(name, flat=flat) else: return AliasedClass(element, alias=alias, flat=flat, - name=name, adapt_on_names=adapt_on_names) + name=name, adapt_on_names=adapt_on_names) def with_polymorphic(base, classes, selectable=False, - flat=False, - polymorphic_on=None, aliased=False, - innerjoin=False, _use_mapper_path=False): + flat=False, + polymorphic_on=None, aliased=False, + innerjoin=False, _use_mapper_path=False, + _existing_alias=None): """Produce an :class:`.AliasedClass` construct which specifies columns for descendant mappers of the given base. @@ -660,11 +691,12 @@ def with_polymorphic(base, classes, selectable=False, support parenthesized joins, such as SQLite and older versions of MySQL. - :param flat: Boolean, will be passed through to the :meth:`.FromClause.alias` - call so that aliases of :class:`.Join` objects don't include an enclosing - SELECT. This can lead to more efficient queries in many circumstances. - A JOIN against a nested JOIN will be rewritten as a JOIN against an aliased - SELECT subquery on backends that don't support this syntax. + :param flat: Boolean, will be passed through to the + :meth:`.FromClause.alias` call so that aliases of :class:`.Join` + objects don't include an enclosing SELECT. This can lead to more + efficient queries in many circumstances. A JOIN against a nested JOIN + will be rewritten as a JOIN against an aliased SELECT subquery on + backends that don't support this syntax. Setting ``flat`` to ``True`` implies the ``aliased`` flag is also ``True``. 
@@ -693,16 +725,26 @@ def with_polymorphic(base, classes, selectable=False, only be specified if querying for one specific subtype only """ primary_mapper = _class_to_mapper(base) + if _existing_alias: + assert _existing_alias.mapper is primary_mapper + classes = util.to_set(classes) + new_classes = set([ + mp.class_ for mp in + _existing_alias.with_polymorphic_mappers]) + if classes == new_classes: + return _existing_alias + else: + classes = classes.union(new_classes) mappers, selectable = primary_mapper.\ - _with_polymorphic_args(classes, selectable, - innerjoin=innerjoin) + _with_polymorphic_args(classes, selectable, + innerjoin=innerjoin) if aliased or flat: selectable = selectable.alias(flat=flat) return AliasedClass(base, - selectable, - with_polymorphic_mappers=mappers, - with_polymorphic_discriminator=polymorphic_on, - use_mapper_path=_use_mapper_path) + selectable, + with_polymorphic_mappers=mappers, + with_polymorphic_discriminator=polymorphic_on, + use_mapper_path=_use_mapper_path) def _orm_annotate(element, exclude=None): @@ -725,8 +767,8 @@ def _orm_deannotate(element): """ return sql_util._deep_deannotate(element, - values=("_orm_adapt", "parententity") - ) + values=("_orm_adapt", "parententity") + ) def _orm_full_deannotate(element): @@ -738,7 +780,10 @@ class _ORMJoin(expression.Join): __visit_name__ = expression.Join.__visit_name__ - def __init__(self, left, right, onclause=None, isouter=False): + def __init__( + self, + left, right, onclause=None, isouter=False, + _left_memo=None, _right_memo=None): left_info = inspection.inspect(left) left_orm_info = getattr(left, '_joined_from_info', left_info) @@ -748,6 +793,9 @@ def __init__(self, left, right, onclause=None, isouter=False): self._joined_from_info = right_info + self._left_memo = _left_memo + self._right_memo = _right_memo + if isinstance(onclause, util.string_types): onclause = getattr(left_orm_info.entity, onclause) @@ -761,18 +809,19 @@ def __init__(self, left, right, onclause=None, isouter=False): prop = None if prop: - if sql_util.clause_is_present(on_selectable, left_info.selectable): + if sql_util.clause_is_present( + on_selectable, left_info.selectable): adapt_from = on_selectable else: adapt_from = left_info.selectable pj, sj, source, dest, \ secondary, target_adapter = prop._create_joins( - source_selectable=adapt_from, - dest_selectable=adapt_to, - source_polymorphic=True, - dest_polymorphic=True, - of_type=right_info.mapper) + source_selectable=adapt_from, + dest_selectable=adapt_to, + source_polymorphic=True, + dest_polymorphic=True, + of_type=right_info.mapper) if sj is not None: if isouter: @@ -788,6 +837,43 @@ def __init__(self, left, right, onclause=None, isouter=False): expression.Join.__init__(self, left, right, onclause, isouter) + if not prop and getattr(right_info, 'mapper', None) \ + and right_info.mapper.single: + # if single inheritance target and we are using a manual + # or implicit ON clause, augment it the same way we'd augment the + # WHERE. + single_crit = right_info.mapper._single_table_criterion + if single_crit is not None: + if right_info.is_aliased_class: + single_crit = right_info._adapter.traverse(single_crit) + self.onclause = self.onclause & single_crit + + def _splice_into_center(self, other): + """Splice a join into the center. 
+ + Given join(a, b) and join(b, c), return join(a, b).join(c) + + """ + leftmost = other + while isinstance(leftmost, sql.Join): + leftmost = leftmost.left + + assert self.right is leftmost + + left = _ORMJoin( + self.left, other.left, + self.onclause, isouter=self.isouter, + _left_memo=self._left_memo, + _right_memo=other._left_memo + ) + + return _ORMJoin( + left, + other.right, + other.onclause, isouter=other.isouter, + _right_memo=other._right_memo + ) + def join(self, right, onclause=None, isouter=False, join_to_left=None): return _ORMJoin(self, right, onclause, isouter) @@ -880,10 +966,7 @@ def with_parent(instance, prop): elif isinstance(prop, attributes.QueryableAttribute): prop = prop.property - return prop.compare(operators.eq, - instance, - value_is_parent=True) - + return prop._with_parent(instance) def has_identity(object): @@ -901,6 +984,7 @@ def has_identity(object): state = attributes.instance_state(object) return state.has_identity + def was_deleted(object): """Return True if the given object was deleted within a session flush. @@ -913,8 +997,6 @@ def was_deleted(object): return state.deleted - - def randomize_unitofwork(): """Use random-ordering sets within the unit of work in order to detect unit of work sorting issues. @@ -934,9 +1016,9 @@ def randomize_unitofwork(): By calling ``randomize_unitofwork()`` when a script first runs, the ordering of a key series of sets within the unit of work implementation - are randomized, so that the script can be minimized down to the fundamental - mapping and operation that's failing, while still reproducing the issue - on at least some runs. + are randomized, so that the script can be minimized down to the + fundamental mapping and operation that's failing, while still reproducing + the issue on at least some runs. This utility is also available when running the test suite via the ``--reversetop`` flag. @@ -949,5 +1031,4 @@ def randomize_unitofwork(): from sqlalchemy.util import topological from sqlalchemy.testing.util import RandomSet topological.set = unitofwork.set = session.set = mapper.set = \ - dependency.set = RandomSet - + dependency.set = RandomSet diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py index 799443546f..32b4736fa3 100644 --- a/lib/sqlalchemy/pool.py +++ b/lib/sqlalchemy/pool.py @@ -1,5 +1,6 @@ # sqlalchemy/pool.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -66,7 +67,9 @@ def clear_managers(): reset_commit = util.symbol('reset_commit') reset_none = util.symbol('reset_none') + class _ConnDialect(object): + """partial implementation of :class:`.Dialect` which provides DBAPI connection methods. @@ -75,6 +78,7 @@ class _ConnDialect(object): :class:`.Dialect`. 
""" + def do_rollback(self, dbapi_connection): dbapi_connection.rollback() @@ -84,20 +88,22 @@ def do_commit(self, dbapi_connection): def do_close(self, dbapi_connection): dbapi_connection.close() + class Pool(log.Identified): + """Abstract base class for connection pools.""" _dialect = _ConnDialect() def __init__(self, - creator, recycle=-1, echo=None, - use_threadlocal=False, - logging_name=None, - reset_on_return=True, - listeners=None, - events=None, - _dispatch=None, - _dialect=None): + creator, recycle=-1, echo=None, + use_threadlocal=False, + logging_name=None, + reset_on_return=True, + listeners=None, + events=None, + _dispatch=None, + _dialect=None): """ Construct a Pool. @@ -133,10 +139,10 @@ def __init__(self, .. warning:: The :paramref:`.Pool.use_threadlocal` flag **does not affect the behavior** of :meth:`.Engine.connect`. - :meth:`.Engine.connect` makes use of the :meth:`.Pool.unique_connection` - method which **does not use thread local context**. - To produce a :class:`.Connection` which refers to the - :meth:`.Pool.connect` method, use + :meth:`.Engine.connect` makes use of the + :meth:`.Pool.unique_connection` method which **does not use thread + local context**. To produce a :class:`.Connection` which refers + to the :meth:`.Pool.connect` method, use :meth:`.Engine.contextual_connect`. Note that other SQLAlchemy connectivity systems such as @@ -180,6 +186,10 @@ def __init__(self, database that supports transactions, as it will lead to deadlocks and stale state. + * ``"none"`` - same as ``None`` + + .. versionadded:: 0.9.10 + * ``False`` - same as None, this is here for backwards compatibility. @@ -214,16 +224,17 @@ def __init__(self, self._use_threadlocal = use_threadlocal if reset_on_return in ('rollback', True, reset_rollback): self._reset_on_return = reset_rollback - elif reset_on_return in (None, False, reset_none): + elif reset_on_return in ('none', None, False, reset_none): self._reset_on_return = reset_none elif reset_on_return in ('commit', reset_commit): self._reset_on_return = reset_commit else: raise exc.ArgumentError( - "Invalid value for 'reset_on_return': %r" - % reset_on_return) + "Invalid value for 'reset_on_return': %r" + % reset_on_return) self.echo = echo + if _dispatch: self.dispatch._update(_dispatch, only_propagate=False) if _dialect: @@ -233,20 +244,53 @@ def __init__(self, event.listen(self, target, fn) if listeners: util.warn_deprecated( - "The 'listeners' argument to Pool (and " - "create_engine()) is deprecated. Use event.listen().") + "The 'listeners' argument to Pool (and " + "create_engine()) is deprecated. Use event.listen().") for l in listeners: self.add_listener(l) + @property + def _creator(self): + return self.__dict__['_creator'] + + @_creator.setter + def _creator(self, creator): + self.__dict__['_creator'] = creator + self._invoke_creator = self._should_wrap_creator(creator) + + def _should_wrap_creator(self, creator): + """Detect if creator accepts a single argument, or is sent + as a legacy style no-arg function. 
+ + """ + + try: + argspec = util.get_callable_argspec(self._creator, no_self=True) + except TypeError: + return lambda crec: creator() + + defaulted = argspec[3] is not None and len(argspec[3]) or 0 + positionals = len(argspec[0]) - defaulted + + # look for the exact arg signature that DefaultStrategy + # sends us + if (argspec[0], argspec[3]) == (['connection_record'], (None,)): + return creator + # or just a single positional + elif positionals == 1: + return creator + # all other cases, just wrap and assume legacy "creator" callable + # thing + else: + return lambda crec: creator() + def _close_connection(self, connection): self.logger.debug("Closing connection %r", connection) try: self._dialect.do_close(connection) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: self.logger.error("Exception closing connection %r", - connection, exc_info=True) + connection, exc_info=True) @util.deprecated( 2.7, "Pool.add_listener is deprecated. Use event.listen()") @@ -266,8 +310,9 @@ def unique_connection(self): This method is equivalent to :meth:`.Pool.connect` when the :paramref:`.Pool.use_threadlocal` flag is not set to True. - When :paramref:`.Pool.use_threadlocal` is True, the :meth:`.Pool.unique_connection` - method provides a means of bypassing the threadlocal context. + When :paramref:`.Pool.use_threadlocal` is True, the + :meth:`.Pool.unique_connection` method provides a means of bypassing + the threadlocal context. """ return _ConnectionFairy._checkout(self) @@ -294,12 +339,11 @@ def _invalidate(self, connection, exception=None): if getattr(connection, 'is_valid', False): connection.invalidate(exception) - def recreate(self): """Return a new :class:`.Pool`, of the same class as this one and configured with identical creation arguments. - This method is used in conjunection with :meth:`dispose` + This method is used in conjunction with :meth:`dispose` to close out an entire :class:`.Pool` and create a new one in its place. @@ -370,6 +414,7 @@ def status(self): class _ConnectionRecord(object): + """Internal object which maintains an individual DBAPI connection referenced by a :class:`.Pool`. @@ -405,8 +450,8 @@ def __init__(self, pool): self.finalize_callback = deque() pool.dispatch.first_connect.\ - for_modify(pool.dispatch).\ - exec_once(self.connection, self) + for_modify(pool.dispatch).\ + exec_once(self.connection, self) pool.dispatch.connect(self.connection, self) connection = None @@ -418,6 +463,8 @@ def __init__(self, pool): """ + _soft_invalidate_time = 0 + @util.memoized_property def info(self): """The ``.info`` dictionary associated with the DBAPI connection. 
@@ -434,20 +481,21 @@ def checkout(cls, pool): try: dbapi_connection = rec.get_connection() except: - rec.checkin() - raise - fairy = _ConnectionFairy(dbapi_connection, rec) + with util.safe_reraise(): + rec.checkin() + echo = pool._should_log_debug() + fairy = _ConnectionFairy(dbapi_connection, rec, echo) rec.fairy_ref = weakref.ref( - fairy, - lambda ref: _finalize_fairy and \ - _finalize_fairy( - dbapi_connection, - rec, pool, ref, pool._echo) - ) + fairy, + lambda ref: _finalize_fairy and + _finalize_fairy( + dbapi_connection, + rec, pool, ref, echo) + ) _refs.add(rec) - if pool._echo: + if echo: pool.logger.debug("Connection %r checked out from pool", - dbapi_connection) + dbapi_connection) return fairy def checkin(self): @@ -461,70 +509,102 @@ def checkin(self): pool.dispatch.checkin(connection, self) pool._return_conn(self) - def close(self): if self.connection is not None: self.__close() - def invalidate(self, e=None): + def invalidate(self, e=None, soft=False): """Invalidate the DBAPI connection held by this :class:`._ConnectionRecord`. This method is called for all connection invalidations, including - when the :meth:`._ConnectionFairy.invalidate` or :meth:`.Connection.invalidate` - methods are called, as well as when any so-called "automatic invalidation" - condition occurs. + when the :meth:`._ConnectionFairy.invalidate` or + :meth:`.Connection.invalidate` methods are called, as well as when any + so-called "automatic invalidation" condition occurs. + + :param e: an exception object indicating a reason for the invalidation. + + :param soft: if True, the connection isn't closed; instead, this + connection will be recycled on next checkout. + + .. versionadded:: 1.0.3 .. seealso:: :ref:`pool_connection_invalidation` """ - self.__pool.dispatch.invalidate(self.connection, self, e) + # already invalidated + if self.connection is None: + return + if soft: + self.__pool.dispatch.soft_invalidate(self.connection, self, e) + else: + self.__pool.dispatch.invalidate(self.connection, self, e) if e is not None: self.__pool.logger.info( - "Invalidate connection %r (reason: %s:%s)", + "%sInvalidate connection %r (reason: %s:%s)", + "Soft " if soft else "", self.connection, e.__class__.__name__, e) else: self.__pool.logger.info( - "Invalidate connection %r", self.connection) - self.__close() - self.connection = None + "%sInvalidate connection %r", + "Soft " if soft else "", + self.connection) + if soft: + self._soft_invalidate_time = time.time() + else: + self.__close() + self.connection = None def get_connection(self): recycle = False if self.connection is None: - self.connection = self.__connect() self.info.clear() + self.connection = self.__connect() if self.__pool.dispatch.connect: self.__pool.dispatch.connect(self.connection, self) elif self.__pool._recycle > -1 and \ time.time() - self.starttime > self.__pool._recycle: self.__pool.logger.info( - "Connection %r exceeded timeout; recycling", - self.connection) + "Connection %r exceeded timeout; recycling", + self.connection) recycle = True elif self.__pool._invalidate_time > self.starttime: self.__pool.logger.info( - "Connection %r invalidated due to pool invalidation; recycling", - self.connection - ) + "Connection %r invalidated due to pool invalidation; " + + "recycling", + self.connection + ) + recycle = True + elif self._soft_invalidate_time > self.starttime: + self.__pool.logger.info( + "Connection %r invalidated due to local soft invalidation; " + + "recycling", + self.connection + ) recycle = True if recycle: self.__close() - 
self.connection = self.__connect() self.info.clear() + + # ensure that if self.__connect() fails, + # we are not referring to the previous stale connection here + self.connection = None + self.connection = self.__connect() + if self.__pool.dispatch.connect: self.__pool.dispatch.connect(self.connection, self) return self.connection def __close(self): + self.finalize_callback.clear() self.__pool._close_connection(self.connection) def __connect(self): try: self.starttime = time.time() - connection = self.__pool._creator() + connection = self.__pool._invoke_creator(self) self.__pool.logger.debug("Created new connection %r", connection) return connection except Exception as e: @@ -532,7 +612,8 @@ def __connect(self): raise -def _finalize_fairy(connection, connection_record, pool, ref, echo, fairy=None): +def _finalize_fairy(connection, connection_record, + pool, ref, echo, fairy=None): """Cleanup for a :class:`._ConnectionFairy` whether or not it's already been garbage collected. @@ -540,26 +621,29 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo, fairy=None): _refs.discard(connection_record) if ref is not None and \ - connection_record.fairy_ref is not ref: + connection_record.fairy_ref is not ref: return if connection is not None: if connection_record and echo: pool.logger.debug("Connection %r being returned to pool", - connection) + connection) try: - fairy = fairy or _ConnectionFairy(connection, connection_record) + fairy = fairy or _ConnectionFairy( + connection, connection_record, echo) assert fairy.connection is connection - fairy._reset(pool, echo) + fairy._reset(pool) # Immediately close detached instances if not connection_record: pool._close_connection(connection) - except Exception as e: + except BaseException as e: + pool.logger.error( + "Exception during reset or similar", exc_info=True) if connection_record: connection_record.invalidate(e=e) - if isinstance(e, (SystemExit, KeyboardInterrupt)): + if not isinstance(e, Exception): raise if connection_record: @@ -570,6 +654,7 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo, fairy=None): class _ConnectionFairy(object): + """Proxies a DBAPI connection and provides return-on-dereference support. @@ -577,10 +662,11 @@ class _ConnectionFairy(object): to provide context management to a DBAPI connection delivered by that :class:`.Pool`. - The name "fairy" is inspired by the fact that the :class:`._ConnectionFairy` - object's lifespan is transitory, as it lasts only for the length of a - specific DBAPI connection being checked out from the pool, and additionally - that as a transparent proxy, it is mostly invisible. + The name "fairy" is inspired by the fact that the + :class:`._ConnectionFairy` object's lifespan is transitory, as it lasts + only for the length of a specific DBAPI connection being checked out from + the pool, and additionally that as a transparent proxy, it is mostly + invisible. .. 
seealso:: @@ -588,9 +674,10 @@ class _ConnectionFairy(object): """ - def __init__(self, dbapi_connection, connection_record): + def __init__(self, dbapi_connection, connection_record, echo): self.connection = dbapi_connection self._connection_record = connection_record + self._echo = echo connection = None """A reference to the actual DBAPI connection being tracked.""" @@ -606,8 +693,8 @@ def __init__(self, dbapi_connection, connection_record): _reset_agent = None """Refer to an object with a ``.commit()`` and ``.rollback()`` method; if non-None, the "reset-on-return" feature will call upon this object - rather than directly against the dialect-level do_rollback() and do_commit() - methods. + rather than directly against the dialect-level do_rollback() and + do_commit() methods. In practice, a :class:`.Connection` assigns a :class:`.Transaction` object to this variable when one is in scope so that the :class:`.Transaction` @@ -627,7 +714,6 @@ def _checkout(cls, pool, threadconns=None, fairy=None): fairy._pool = pool fairy._counter = 0 - fairy._echo = pool._should_log_debug() if threadconns is not None: threadconns.current = weakref.ref(fairy) @@ -644,14 +730,20 @@ def _checkout(cls, pool, threadconns=None, fairy=None): while attempts > 0: try: pool.dispatch.checkout(fairy.connection, - fairy._connection_record, - fairy) + fairy._connection_record, + fairy) return fairy except exc.DisconnectionError as e: pool.logger.info( "Disconnection detected on checkout: %s", e) fairy._connection_record.invalidate(e) - fairy.connection = fairy._connection_record.get_connection() + try: + fairy.connection = \ + fairy._connection_record.get_connection() + except: + with util.safe_reraise(): + fairy._connection_record.checkin() + attempts -= 1 pool.logger.info("Reconnection attempts exhausted on checkout") @@ -663,31 +755,31 @@ def _checkout_existing(self): def _checkin(self): _finalize_fairy(self.connection, self._connection_record, - self._pool, None, self._echo, fairy=self) + self._pool, None, self._echo, fairy=self) self.connection = None self._connection_record = None _close = _checkin - def _reset(self, pool, echo): + def _reset(self, pool): if pool.dispatch.reset: pool.dispatch.reset(self, self._connection_record) if pool._reset_on_return is reset_rollback: - if echo: + if self._echo: pool.logger.debug("Connection %s rollback-on-return%s", - self.connection, - ", via agent" - if self._reset_agent else "") + self.connection, + ", via agent" + if self._reset_agent else "") if self._reset_agent: self._reset_agent.rollback() else: pool._dialect.do_rollback(self) elif pool._reset_on_return is reset_commit: - if echo: + if self._echo: pool.logger.debug("Connection %s commit-on-return%s", - self.connection, - ", via agent" - if self._reset_agent else "") + self.connection, + ", via agent" + if self._reset_agent else "") if self._reset_agent: self._reset_agent.commit() else: @@ -719,7 +811,7 @@ def info(self): """ return self._connection_record.info - def invalidate(self, e=None): + def invalidate(self, e=None, soft=False): """Mark this connection as invalidated. This method can be called directly, and is also called as a result @@ -728,6 +820,13 @@ def invalidate(self, e=None): further use by the pool. The invalidation mechanism proceeds via the :meth:`._ConnectionRecord.invalidate` internal method. + :param e: an exception object indicating a reason for the invalidation. + + :param soft: if True, the connection isn't closed; instead, this + connection will be recycled on next checkout. + + .. 
versionadded:: 1.0.3 + .. seealso:: :ref:`pool_connection_invalidation` @@ -738,9 +837,10 @@ def invalidate(self, e=None): util.warn("Can't invalidate an already-closed connection.") return if self._connection_record: - self._connection_record.invalidate(e=e) - self.connection = None - self._checkin() + self._connection_record.invalidate(e=e, soft=soft) + if not soft: + self.connection = None + self._checkin() def cursor(self, *args, **kwargs): """Return a new DBAPI cursor for the underlying connection. @@ -754,7 +854,6 @@ def cursor(self, *args, **kwargs): def __getattr__(self, key): return getattr(self.connection, key) - def detach(self): """Separate this connection from its Pool. @@ -783,13 +882,26 @@ def close(self): self._checkin() - class SingletonThreadPool(Pool): + """A Pool that maintains one connection per thread. Maintains one connection per each thread, never moving a connection to a thread other than the one which it was created in. + .. warning:: the :class:`.SingletonThreadPool` will call ``.close()`` + on arbitrary connections that exist beyond the size setting of + ``pool_size``, e.g. if more unique **thread identities** + than what ``pool_size`` states are used. This cleanup is + non-deterministic and not sensitive to whether or not the connections + linked to those thread identities are currently in use. + + :class:`.SingletonThreadPool` may be improved in a future release, + however in its current status it is generally used only for test + scenarios using a SQLite ``:memory:`` database and is not recommended + for production use. + + Options are the same as those of :class:`.Pool`, as well as: :param pool_size: The number of threads in which to maintain connections @@ -811,14 +923,14 @@ def __init__(self, creator, pool_size=5, **kw): def recreate(self): self.logger.info("Pool recreating") return self.__class__(self._creator, - pool_size=self.size, - recycle=self._recycle, - echo=self.echo, - logging_name=self._orig_logging_name, - use_threadlocal=self._use_threadlocal, - reset_on_return=self._reset_on_return, - _dispatch=self.dispatch, - _dialect=self._dialect) + pool_size=self.size, + recycle=self._recycle, + echo=self.echo, + logging_name=self._orig_logging_name, + use_threadlocal=self._use_threadlocal, + reset_on_return=self._reset_on_return, + _dispatch=self.dispatch, + _dialect=self._dialect) def dispose(self): """Dispose of this pool.""" @@ -826,9 +938,7 @@ def dispose(self): for conn in self._all_conns: try: conn.close() - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: # pysqlite won't even let you close a conn from a thread # that didn't create it pass @@ -842,7 +952,7 @@ def _cleanup(self): def status(self): return "SingletonThreadPool id:%d size: %d" % \ - (id(self), len(self._all_conns)) + (id(self), len(self._all_conns)) def _do_return_conn(self, conn): pass @@ -863,6 +973,7 @@ def _do_get(self): class QueuePool(Pool): + """A :class:`.Pool` that imposes a limit on the number of open connections. :class:`.QueuePool` is the default pooling implementation used for @@ -903,9 +1014,10 @@ def __init__(self, creator, pool_size=5, max_overflow=10, timeout=30, :param timeout: The number of seconds to wait before giving up on returning a connection. Defaults to 30. - :param \**kw: Other keyword arguments including :paramref:`.Pool.recycle`, - :paramref:`.Pool.echo`, :paramref:`.Pool.reset_on_return` and others - are passed to the :class:`.Pool` constructor. 
+ :param \**kw: Other keyword arguments including + :paramref:`.Pool.recycle`, :paramref:`.Pool.echo`, + :paramref:`.Pool.reset_on_return` and others are passed to the + :class:`.Pool` constructor. """ Pool.__init__(self, creator, **kw) @@ -936,16 +1048,16 @@ def _do_get(self): return self._do_get() else: raise exc.TimeoutError( - "QueuePool limit of size %d overflow %d reached, " - "connection timed out, timeout %d" % - (self.size(), self.overflow(), self._timeout)) + "QueuePool limit of size %d overflow %d reached, " + "connection timed out, timeout %d" % + (self.size(), self.overflow(), self._timeout)) if self._inc_overflow(): try: return self._create_connection() except: - self._dec_overflow() - raise + with util.safe_reraise(): + self._dec_overflow() else: return self._do_get() @@ -971,14 +1083,14 @@ def _dec_overflow(self): def recreate(self): self.logger.info("Pool recreating") return self.__class__(self._creator, pool_size=self._pool.maxsize, - max_overflow=self._max_overflow, - timeout=self._timeout, - recycle=self._recycle, echo=self.echo, - logging_name=self._orig_logging_name, - use_threadlocal=self._use_threadlocal, - reset_on_return=self._reset_on_return, - _dispatch=self.dispatch, - _dialect=self._dialect) + max_overflow=self._max_overflow, + timeout=self._timeout, + recycle=self._recycle, echo=self.echo, + logging_name=self._orig_logging_name, + use_threadlocal=self._use_threadlocal, + reset_on_return=self._reset_on_return, + _dispatch=self.dispatch, + _dialect=self._dialect) def dispose(self): while True: @@ -993,11 +1105,11 @@ def dispose(self): def status(self): return "Pool size: %d Connections in pool: %d "\ - "Current Overflow: %d Current Checked out "\ - "connections: %d" % (self.size(), - self.checkedin(), - self.overflow(), - self.checkedout()) + "Current Overflow: %d Current Checked out "\ + "connections: %d" % (self.size(), + self.checkedin(), + self.overflow(), + self.checkedout()) def size(self): return self._pool.maxsize @@ -1013,6 +1125,7 @@ def checkedout(self): class NullPool(Pool): + """A Pool which does not pool connections. Instead it literally opens and closes the underlying DB-API connection @@ -1041,19 +1154,20 @@ def recreate(self): self.logger.info("Pool recreating") return self.__class__(self._creator, - recycle=self._recycle, - echo=self.echo, - logging_name=self._orig_logging_name, - use_threadlocal=self._use_threadlocal, - reset_on_return=self._reset_on_return, - _dispatch=self.dispatch, - _dialect=self._dialect) + recycle=self._recycle, + echo=self.echo, + logging_name=self._orig_logging_name, + use_threadlocal=self._use_threadlocal, + reset_on_return=self._reset_on_return, + _dispatch=self.dispatch, + _dialect=self._dialect) def dispose(self): pass class StaticPool(Pool): + """A Pool of exactly one connection, used for all requests. Reconnect-related functions such as ``recycle`` and connection @@ -1101,6 +1215,7 @@ def _do_get(self): class AssertionPool(Pool): + """A :class:`.Pool` that allows at most one checked out connection at any given time. @@ -1114,6 +1229,7 @@ class AssertionPool(Pool): this in the assertion error raised. 
""" + def __init__(self, *args, **kw): self._conn = None self._checked_out = False @@ -1138,9 +1254,9 @@ def dispose(self): def recreate(self): self.logger.info("Pool recreating") return self.__class__(self._creator, echo=self.echo, - logging_name=self._orig_logging_name, - _dispatch=self.dispatch, - _dialect=self._dialect) + logging_name=self._orig_logging_name, + _dispatch=self.dispatch, + _dialect=self._dialect) def _do_get(self): if self._checked_out: @@ -1161,6 +1277,7 @@ def _do_get(self): class _DBProxy(object): + """Layers connection pooling behavior on top of a standard DB-API module. Proxies a DB-API 2.0 connect() call to a connection pool keyed to the @@ -1206,8 +1323,8 @@ def get_pool(self, *args, **kw): try: if key not in self.pools: kw.pop('sa_pool_key', None) - pool = self.poolclass(lambda: - self.module.connect(*args, **kw), **self.kw) + pool = self.poolclass( + lambda: self.module.connect(*args, **kw), **self.kw) self.pools[key] = pool return pool else: diff --git a/lib/sqlalchemy/processors.py b/lib/sqlalchemy/processors.py index d0f52e42b8..b57e6740b7 100644 --- a/lib/sqlalchemy/processors.py +++ b/lib/sqlalchemy/processors.py @@ -1,5 +1,6 @@ # sqlalchemy/processors.py -# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors +# # Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com # # This module is part of SQLAlchemy and is released under @@ -32,15 +33,17 @@ def process(value): m = rmatch(value) except TypeError: raise ValueError("Couldn't parse %s string '%r' " - "- value is not a string." % - (type_.__name__, value)) + "- value is not a string." % + (type_.__name__, value)) if m is None: raise ValueError("Couldn't parse %s string: " - "'%s'" % (type_.__name__, value)) + "'%s'" % (type_.__name__, value)) if has_named_groups: groups = m.groupdict(0) - return type_(**dict(list(zip(iter(groups.keys()), - list(map(int, iter(groups.values()))))))) + return type_(**dict(list(zip( + iter(groups.keys()), + list(map(int, iter(groups.values()))) + )))) else: return type_(*list(map(int, m.groups(0)))) return process @@ -111,7 +114,7 @@ def int_to_boolean(value): return value and True or False DATETIME_RE = re.compile( - "(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?") + "(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?") TIME_RE = re.compile("(\d+):(\d+):(\d+)(?:\.(\d+))?") DATE_RE = re.compile("(\d+)-(\d+)-(\d+)") @@ -123,10 +126,10 @@ def int_to_boolean(value): try: from sqlalchemy.cprocessors import UnicodeResultProcessor, \ - DecimalResultProcessor, \ - to_float, to_str, int_to_boolean, \ - str_to_datetime, str_to_time, \ - str_to_date + DecimalResultProcessor, \ + to_float, to_str, int_to_boolean, \ + str_to_datetime, str_to_time, \ + str_to_date def to_unicode_processor_factory(encoding, errors=None): if errors is not None: diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py index 8556272a62..5b703f7b6a 100644 --- a/lib/sqlalchemy/schema.py +++ b/lib/sqlalchemy/schema.py @@ -1,10 +1,11 @@ # schema.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""Compatiblity namespace for sqlalchemy.sql.schema and related. +"""Compatibility namespace for sqlalchemy.sql.schema and related. 
""" @@ -34,6 +35,7 @@ UniqueConstraint, _get_table_key, ColumnCollectionConstraint, + ColumnCollectionMixin ) @@ -57,5 +59,7 @@ DDLBase, DDLElement, _CreateDropBase, - _DDLCompiles + _DDLCompiles, + sort_tables, + sort_tables_and_constraints ) diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py index 95dae5aa35..eb305a83ad 100644 --- a/lib/sqlalchemy/sql/__init__.py +++ b/lib/sqlalchemy/sql/__init__.py @@ -1,5 +1,6 @@ # sql/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -37,6 +38,7 @@ false, False_, func, + funcfilter, insert, intersect, intersect_all, @@ -62,10 +64,11 @@ union, union_all, update, - ) +) from .visitors import ClauseVisitor + def __go(lcls): global __all__ from .. import util as _sa_util @@ -73,7 +76,7 @@ def __go(lcls): import inspect as _inspect __all__ = sorted(name for name, obj in lcls.items() - if not (name.startswith('_') or _inspect.ismodule(obj))) + if not (name.startswith('_') or _inspect.ismodule(obj))) from .annotation import _prepare_annotations, Annotated from .elements import AnnotatedColumnElement, ClauseList @@ -87,4 +90,3 @@ def __go(lcls): from . import naming __go(locals()) - diff --git a/lib/sqlalchemy/sql/annotation.py b/lib/sqlalchemy/sql/annotation.py index 11b0666750..6ad25abaab 100644 --- a/lib/sqlalchemy/sql/annotation.py +++ b/lib/sqlalchemy/sql/annotation.py @@ -1,17 +1,20 @@ # sql/annotation.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """The :class:`.Annotated` class and related routines; creates hash-equivalent -copies of SQL constructs which contain context-specific markers and associations. +copies of SQL constructs which contain context-specific markers and +associations. """ from .. import util from . import operators + class Annotated(object): """clones a ClauseElement and applies an 'annotations' dictionary. @@ -43,6 +46,7 @@ def __init__(self, element, values): self.__dict__ = element.__dict__.copy() self.__element = element self._annotations = values + self._hash = hash(element) def _annotate(self, values): _values = self._annotations.copy() @@ -65,7 +69,8 @@ def _deannotate(self, values=None, clone=True): return self._with_annotations(_values) def _compiler_dispatch(self, visitor, **kw): - return self.__element.__class__._compiler_dispatch(self, visitor, **kw) + return self.__element.__class__._compiler_dispatch( + self, visitor, **kw) @property def _constructor(self): @@ -83,7 +88,7 @@ def _clone(self): return self.__class__(clone, self._annotations) def __hash__(self): - return hash(self.__element) + return self._hash def __eq__(self, other): if isinstance(self.__element, operators.ColumnOperators): @@ -92,14 +97,12 @@ def __eq__(self, other): return hash(other) == hash(self) - # hard-generate Annotated subclasses. this technique # is used instead of on-the-fly types (i.e. type.__new__()) # so that the resulting objects are pickleable. annotated_classes = {} - def _deep_annotate(element, annotations, exclude=None): """Deep copy the given ClauseElement, annotating each element with the given annotations dictionary. 
@@ -109,8 +112,8 @@ def _deep_annotate(element, annotations, exclude=None): """ def clone(elem): if exclude and \ - hasattr(elem, 'proxy_set') and \ - elem.proxy_set.intersection(exclude): + hasattr(elem, 'proxy_set') and \ + elem.proxy_set.intersection(exclude): newelem = elem._clone() elif annotations != elem._annotations: newelem = elem._annotate(annotations) @@ -162,6 +165,7 @@ def _shallow_annotate(element, annotations): element._copy_internals() return element + def _new_annotation_type(cls, base_cls): if issubclass(cls, Annotated): return cls @@ -177,11 +181,12 @@ def _new_annotation_type(cls, base_cls): break annotated_classes[cls] = anno_cls = type( - "Annotated%s" % cls.__name__, - (base_cls, cls), {}) + "Annotated%s" % cls.__name__, + (base_cls, cls), {}) globals()["Annotated%s" % cls.__name__] = anno_cls return anno_cls + def _prepare_annotations(target_hierarchy, base_cls): stack = [target_hierarchy] while stack: diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py index 379f61ed79..cf7dcfd310 100644 --- a/lib/sqlalchemy/sql/base.py +++ b/lib/sqlalchemy/sql/base.py @@ -1,5 +1,6 @@ # sql/base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -18,6 +19,7 @@ PARSE_AUTOCOMMIT = util.symbol('PARSE_AUTOCOMMIT') NO_ARG = util.symbol('NO_ARG') + class Immutable(object): """mark a ClauseElement as 'immutable' when expressions are cloned.""" @@ -31,10 +33,10 @@ def _clone(self): return self - def _from_objects(*elements): return itertools.chain(*[element._from_objects for element in elements]) + @util.decorator def _generative(fn, *args, **kw): """Mark a method as generative.""" @@ -49,6 +51,7 @@ class _DialectArgView(collections.MutableMapping): _. """ + def __init__(self, obj): self.obj = obj @@ -75,7 +78,7 @@ def __setitem__(self, key, value): dialect, value_key = self._key(key) except KeyError: raise exc.ArgumentError( - "Keys must be of the form _") + "Keys must be of the form _") else: self.obj.dialect_options[dialect][value_key] = value @@ -85,15 +88,17 @@ def __delitem__(self, key): def __len__(self): return sum(len(args._non_defaults) for args in - self.obj.dialect_options.values()) + self.obj.dialect_options.values()) def __iter__(self): return ( - "%s_%s" % (dialect_name, value_name) + util.safe_kwarg("%s_%s" % (dialect_name, value_name)) for dialect_name in self.obj.dialect_options - for value_name in self.obj.dialect_options[dialect_name]._non_defaults + for value_name in + self.obj.dialect_options[dialect_name]._non_defaults ) + class _DialectArgDict(collections.MutableMapping): """A dictionary view of dialect-level arguments for a specific dialect. @@ -102,6 +107,7 @@ class _DialectArgDict(collections.MutableMapping): and dialect-specified default arguments. """ + def __init__(self): self._non_defaults = {} self._defaults = {} @@ -149,24 +155,26 @@ def argument_for(cls, dialect_name, argument_name, default): some_index = Index('a', 'b', mydialect_length=5) The :meth:`.DialectKWArgs.argument_for` method is a per-argument - way adding extra arguments to the :attr:`.DefaultDialect.construct_arguments` - dictionary. This dictionary provides a list of argument names accepted by - various schema-level constructs on behalf of a dialect. + way adding extra arguments to the + :attr:`.DefaultDialect.construct_arguments` dictionary. 
This + dictionary provides a list of argument names accepted by various + schema-level constructs on behalf of a dialect. - New dialects should typically specify this dictionary all at once as a data - member of the dialect class. The use case for ad-hoc addition of + New dialects should typically specify this dictionary all at once as a + data member of the dialect class. The use case for ad-hoc addition of argument names is typically for end-user code that is also using a custom compilation scheme which consumes the additional arguments. - :param dialect_name: name of a dialect. The dialect must be locatable, - else a :class:`.NoSuchModuleError` is raised. The dialect must - also include an existing :attr:`.DefaultDialect.construct_arguments` collection, - indicating that it participates in the keyword-argument validation and - default system, else :class:`.ArgumentError` is raised. - If the dialect does not include this collection, then any keyword argument - can be specified on behalf of this dialect already. All dialects - packaged within SQLAlchemy include this collection, however for third - party dialects, support may vary. + :param dialect_name: name of a dialect. The dialect must be + locatable, else a :class:`.NoSuchModuleError` is raised. The + dialect must also include an existing + :attr:`.DefaultDialect.construct_arguments` collection, indicating + that it participates in the keyword-argument validation and default + system, else :class:`.ArgumentError` is raised. If the dialect does + not include this collection, then any keyword argument can be + specified on behalf of this dialect already. All dialects packaged + within SQLAlchemy include this collection, however for third party + dialects, support may vary. :param argument_name: name of the parameter. @@ -178,9 +186,12 @@ def argument_for(cls, dialect_name, argument_name, default): construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name] if construct_arg_dictionary is None: - raise exc.ArgumentError("Dialect '%s' does have keyword-argument " - "validation and defaults enabled configured" % - dialect_name) + raise exc.ArgumentError( + "Dialect '%s' does have keyword-argument " + "validation and defaults enabled configured" % + dialect_name) + if cls not in construct_arg_dictionary: + construct_arg_dictionary[cls] = {} construct_arg_dictionary[cls][argument_name] = default @util.memoized_property @@ -240,8 +251,8 @@ def dialect_options(self): options to this construct. This is a two-level nested registry, keyed to ```` - and ````. For example, the ``postgresql_where`` argument - would be locatable as:: + and ````. 
For example, the ``postgresql_where`` + argument would be locatable as:: arg = my_object.dialect_options['postgresql']['where'] @@ -254,8 +265,8 @@ def dialect_options(self): """ return util.PopulateDict( - util.portable_instancemethod(self._kw_reg_for_dialect_cls) - ) + util.portable_instancemethod(self._kw_reg_for_dialect_cls) + ) def _validate_dialect_kwargs(self, kwargs): # validate remaining kwargs that they all specify DB prefixes @@ -266,29 +277,30 @@ def _validate_dialect_kwargs(self, kwargs): for k in kwargs: m = re.match('^(.+?)_(.+)$', k) if not m: - raise TypeError("Additional arguments should be " - "named _, got '%s'" % k) + raise TypeError( + "Additional arguments should be " + "named _, got '%s'" % k) dialect_name, arg_name = m.group(1, 2) try: construct_arg_dictionary = self.dialect_options[dialect_name] except exc.NoSuchModuleError: util.warn( - "Can't validate argument %r; can't " - "locate any SQLAlchemy dialect named %r" % - (k, dialect_name)) + "Can't validate argument %r; can't " + "locate any SQLAlchemy dialect named %r" % + (k, dialect_name)) self.dialect_options[dialect_name] = d = _DialectArgDict() d._defaults.update({"*": None}) d._non_defaults[arg_name] = kwargs[k] else: if "*" not in construct_arg_dictionary and \ - arg_name not in construct_arg_dictionary: + arg_name not in construct_arg_dictionary: raise exc.ArgumentError( - "Argument %r is not accepted by " - "dialect %r on behalf of %r" % ( - k, - dialect_name, self.__class__ - )) + "Argument %r is not accepted by " + "dialect %r on behalf of %r" % ( + k, + dialect_name, self.__class__ + )) else: construct_arg_dictionary[arg_name] = kwargs[k] @@ -421,11 +433,13 @@ def _set_parent_with_dispatch(self, parent): self._set_parent(parent) self.dispatch.after_parent_attach(self, parent) + class SchemaVisitor(ClauseVisitor): """Define the visiting for ``SchemaItem`` objects.""" __traverse_options__ = {'schema_visitor': True} + class ColumnCollection(util.OrderedProperties): """An ordered dictionary that stores a list of ColumnElement instances. @@ -435,10 +449,13 @@ class ColumnCollection(util.OrderedProperties): """ - def __init__(self): + __slots__ = '_all_columns' + + def __init__(self, *columns): super(ColumnCollection, self).__init__() - self.__dict__['_all_col_set'] = util.column_set() - self.__dict__['_all_columns'] = [] + object.__setattr__(self, '_all_columns', []) + for c in columns: + self.add(c) def __str__(self): return repr([str(c) for c in self]) @@ -464,22 +481,18 @@ def replace(self, column): other = self[column.name] if other.name == other.key: remove_col = other - self._all_col_set.remove(other) del self._data[other.key] if column.key in self._data: remove_col = self._data[column.key] - self._all_col_set.remove(remove_col) - self._all_col_set.add(column) self._data[column.key] = column if remove_col is not None: self._all_columns[:] = [column if c is remove_col - else c for c in self._all_columns] + else c for c in self._all_columns] else: self._all_columns.append(column) - def add(self, column): """Add a column to this collection. @@ -487,6 +500,9 @@ def add(self, column): for this dictionary. """ + if not column.key: + raise exc.ArgumentError( + "Can't add unnamed column to column collection") self[column.key] = column def __delitem__(self, key): @@ -506,15 +522,14 @@ def __setitem__(self, key, value): if not existing.shares_lineage(value): util.warn('Column %r on table %r being replaced by ' '%r, which has the same key. Consider ' - 'use_labels for select() statements.' 
% (key, - getattr(existing, 'table', None), value)) + 'use_labels for select() statements.' % + (key, getattr(existing, 'table', None), value)) # pop out memoized proxy_set as this # operation may very well be occurring # in a _make_proxy operation util.memoized_property.reset(value, "proxy_set") - self._all_col_set.add(value) self._all_columns.append(value) self._data[key] = value @@ -523,19 +538,20 @@ def clear(self): def remove(self, column): del self._data[column.key] - self._all_col_set.remove(column) - self._all_columns[:] = [c for c in self._all_columns if c is not column] + self._all_columns[:] = [ + c for c in self._all_columns if c is not column] def update(self, iter): cols = list(iter) - self._all_columns.extend(c for label, c in cols if c not in self._all_col_set) - self._all_col_set.update(c for label, c in cols) + all_col_set = set(self._all_columns) + self._all_columns.extend( + c for label, c in cols if c not in all_col_set) self._data.update((label, c) for label, c in cols) def extend(self, iter): cols = list(iter) - self._all_columns.extend(c for c in cols if c not in self._all_col_set) - self._all_col_set.update(cols) + all_col_set = set(self._all_columns) + self._all_columns.extend(c for c in cols if c not in all_col_set) self._data.update((c.key, c) for c in cols) __hash__ = None @@ -555,27 +571,24 @@ def __contains__(self, other): return util.OrderedProperties.__contains__(self, other) def __getstate__(self): - return {'_data': self.__dict__['_data'], - '_all_columns': self.__dict__['_all_columns']} + return {'_data': self._data, + '_all_columns': self._all_columns} def __setstate__(self, state): - self.__dict__['_data'] = state['_data'] - self.__dict__['_all_columns'] = state['_all_columns'] - self.__dict__['_all_col_set'] = util.column_set(state['_all_columns']) + object.__setattr__(self, '_data', state['_data']) + object.__setattr__(self, '_all_columns', state['_all_columns']) def contains_column(self, col): - # this has to be done via set() membership - return col in self._all_col_set + return col in set(self._all_columns) def as_immutable(self): - return ImmutableColumnCollection(self._data, self._all_col_set, self._all_columns) + return ImmutableColumnCollection(self._data, self._all_columns) class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection): - def __init__(self, data, colset, all_columns): + def __init__(self, data, all_columns): util.ImmutableProperties.__init__(self, data) - self.__dict__['_all_col_set'] = colset - self.__dict__['_all_columns'] = all_columns + object.__setattr__(self, '_all_columns', all_columns) extend = remove = util.ImmutableProperties._immutable @@ -603,6 +616,7 @@ def __eq__(self, elements, other): def __hash__(self): return hash(tuple(x for x in self)) + def _bind_or_error(schemaitem, msg=None): bind = schemaitem.bind if not bind: @@ -615,7 +629,7 @@ def _bind_or_error(schemaitem, msg=None): item = '%s object' % name if msg is None: msg = "%s is not bound to an Engine or Connection. "\ - "Execution can not proceed without a database to execute "\ - "against." % item + "Execution can not proceed without a database to execute "\ + "against." 
% item raise exc.UnboundExecutionError(msg) return bind diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 5165ee78f1..496844d964 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -1,5 +1,6 @@ # sql/compiler.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -22,13 +23,12 @@ """ +import contextlib import re -from . import schema, sqltypes, operators, functions, \ - util as sql_util, visitors, elements, selectable, base +from . import schema, sqltypes, operators, functions, visitors, \ + elements, selectable, crud from .. import util, exc -import decimal import itertools -import operator RESERVED_WORDS = set([ 'all', 'analyse', 'analyze', 'and', 'any', 'array', @@ -53,7 +53,7 @@ ILLEGAL_INITIAL_CHARACTERS = set([str(x) for x in range(0, 10)]).union(['$']) BIND_PARAMS = re.compile(r'(?= ', operators.eq: ' = ', operators.concat_op: ' || ', - operators.between_op: ' BETWEEN ', operators.match_op: ' MATCH ', + operators.notmatch_op: ' NOT MATCH ', operators.in_op: ' IN ', operators.notin_op: ' NOT IN ', operators.comma_op: ', ', @@ -158,7 +147,9 @@ selectable.CompoundSelect.INTERSECT_ALL: 'INTERSECT ALL' } + class Compiled(object): + """Represent a compiled SQL or DDL expression. The ``__str__`` method of the ``Compiled`` object should produce @@ -171,8 +162,10 @@ class Compiled(object): defaults. """ + _cached_metadata = None + def __init__(self, dialect, statement, bind=None, - compile_kwargs=util.immutabledict()): + compile_kwargs=util.immutabledict()): """Construct a new ``Compiled`` object. :param dialect: ``Dialect`` to compile against. @@ -197,7 +190,7 @@ def __init__(self, dialect, statement, bind=None, self.string = self.process(self.statement, **compile_kwargs) @util.deprecated("0.7", ":class:`.Compiled` objects now compile " - "within the constructor.") + "within the constructor.") def compile(self): """Produce the internal string representation of this element. """ @@ -245,8 +238,8 @@ def execute(self, *multiparams, **params): e = self.bind if e is None: raise exc.UnboundExecutionError( - "This Compiled object is not bound to any Engine " - "or Connection.") + "This Compiled object is not bound to any Engine " + "or Connection.") return e._execute_compiled(self, multiparams, params) def scalar(self, *multiparams, **params): @@ -256,18 +249,20 @@ def scalar(self, *multiparams, **params): return self.execute(*multiparams, **params).scalar() -class TypeCompiler(object): +class TypeCompiler(util.with_metaclass(util.EnsureKWArgType, object)): """Produces DDL specification for TypeEngine objects.""" + ensure_kwarg = 'visit_\w+' + def __init__(self, dialect): self.dialect = dialect - def process(self, type_): - return type_._compiler_dispatch(self) - + def process(self, type_, **kw): + return type_._compiler_dispatch(self, **kw) class _CompileLabel(visitors.Visitable): + """lightweight label object which acts as an expression.Label.""" __visit_name__ = 'label' @@ -287,7 +282,9 @@ def type(self): return self.element.type + class SQLCompiler(Compiled): + """Default implementation of Compiled. Compiles ClauseElements into SQL strings. 
Uses a similar visit @@ -305,6 +302,8 @@ class SQLCompiler(Compiled): INSERT/UPDATE/DELETE """ + isplaintext = False + returning = None """holds the "returning" collection of columns if the statement is CRUD and defines returning columns @@ -331,7 +330,7 @@ class SQLCompiler(Compiled): """ def __init__(self, dialect, statement, column_keys=None, - inline=False, **kwargs): + inline=False, **kwargs): """Construct a new ``DefaultCompiler`` object. dialect @@ -366,7 +365,12 @@ def __init__(self, dialect, statement, column_keys=None, # column/label name, ColumnElement object (if any) and # TypeEngine. ResultProxy uses this for type processing and # column targeting - self.result_map = {} + self._result_columns = [] + + # if False, means we can't be sure the list of entries + # in _result_columns is actually the rendered order. This + # gets flipped when we use TextAsFrom, for example. + self._ordered_columns = True # true if the paramstyle is positional self.positional = dialect.positional @@ -405,24 +409,44 @@ def _init_cte_state(self): self.ctes_by_name = {} self.ctes_recursive = False if self.positional: - self.cte_positional = [] + self.cte_positional = {} + + @contextlib.contextmanager + def _nested_result(self): + """special API to support the use case of 'nested result sets'""" + result_columns, ordered_columns = ( + self._result_columns, self._ordered_columns) + self._result_columns, self._ordered_columns = [], False + + try: + if self.stack: + entry = self.stack[-1] + entry['need_result_map_for_nested'] = True + else: + entry = None + yield self._result_columns, self._ordered_columns + finally: + if entry: + entry.pop('need_result_map_for_nested') + self._result_columns, self._ordered_columns = ( + result_columns, ordered_columns) def _apply_numbered_params(self): poscount = itertools.count(1) self.string = re.sub( - r'\[_POSITION\]', - lambda m: str(util.next(poscount)), - self.string) + r'\[_POSITION\]', + lambda m: str(util.next(poscount)), + self.string) @util.memoized_property def _bind_processors(self): return dict( - (key, value) for key, value in - ((self.bind_names[bindparam], - bindparam.type._cached_bind_processor(self.dialect)) - for bindparam in self.bind_names) - if value is not None - ) + (key, value) for key, value in + ((self.bind_names[bindparam], + bindparam.type._cached_bind_processor(self.dialect)) + for bindparam in self.bind_names) + if value is not None + ) def is_subquery(self): return len(self.stack) > 1 @@ -436,11 +460,13 @@ def construct_params(self, params=None, _group_number=None, _check=True): if params: pd = {} - for bindparam, name in self.bind_names.items(): + for bindparam in self.bind_names: + name = self.bind_names[bindparam] if bindparam.key in params: pd[name] = params[bindparam.key] elif name in params: pd[name] = params[name] + elif _check and bindparam.required: if _group_number: raise exc.InvalidRequestError( @@ -451,8 +477,11 @@ def construct_params(self, params=None, _group_number=None, _check=True): raise exc.InvalidRequestError( "A value is required for bind parameter %r" % bindparam.key) - else: + + elif bindparam.callable: pd[name] = bindparam.effective_value + else: + pd[name] = bindparam.value return pd else: pd = {} @@ -467,7 +496,11 @@ def construct_params(self, params=None, _group_number=None, _check=True): raise exc.InvalidRequestError( "A value is required for bind parameter %r" % bindparam.key) - pd[self.bind_names[bindparam]] = bindparam.effective_value + + if bindparam.callable: + pd[self.bind_names[bindparam]] = 
bindparam.effective_value + else: + pd[self.bind_names[bindparam]] = bindparam.value return pd @property @@ -476,6 +509,11 @@ def params(self): compiled object, for those values that are present.""" return self.construct_params(_check=False) + @util.dependencies("sqlalchemy.engine.result") + def _create_result_map(self, result): + """utility method used for unit tests only.""" + return result.ResultMetaData._create_result_map(self._result_columns) + def default_from(self): """Called when a SELECT statement has no froms, and no FROM clause is to be appended. @@ -488,16 +526,72 @@ def default_from(self): def visit_grouping(self, grouping, asfrom=False, **kwargs): return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")" + def visit_label_reference( + self, element, within_columns_clause=False, **kwargs): + if self.stack and self.dialect.supports_simple_order_by_label: + selectable = self.stack[-1]['selectable'] + + with_cols, only_froms = selectable._label_resolve_dict + if within_columns_clause: + resolve_dict = only_froms + else: + resolve_dict = with_cols + + # this can be None in the case that a _label_reference() + # were subject to a replacement operation, in which case + # the replacement of the Label element may have changed + # to something else like a ColumnClause expression. + order_by_elem = element.element._order_by_label_element + + if order_by_elem is not None and order_by_elem.name in \ + resolve_dict: + + kwargs['render_label_as_label'] = \ + element.element._order_by_label_element + + return self.process( + element.element, within_columns_clause=within_columns_clause, + **kwargs) + + def visit_textual_label_reference( + self, element, within_columns_clause=False, **kwargs): + if not self.stack: + # compiling the element outside of the context of a SELECT + return self.process( + element._text_clause + ) + + selectable = self.stack[-1]['selectable'] + with_cols, only_froms = selectable._label_resolve_dict + try: + if within_columns_clause: + col = only_froms[element.element] + else: + col = with_cols[element.element] + except KeyError: + # treat it like text() + util.warn_limited( + "Can't resolve label reference %r; converting to text()", + util.ellipses_string(element.element)) + return self.process( + element._text_clause + ) + else: + kwargs['render_label_as_label'] = col + return self.process( + col, within_columns_clause=within_columns_clause, **kwargs) + def visit_label(self, label, - add_to_result_map=None, - within_label_clause=False, - within_columns_clause=False, - render_label_as_label=None, - **kw): + add_to_result_map=None, + within_label_clause=False, + within_columns_clause=False, + render_label_as_label=None, + **kw): # only render labels within the columns clause # or ORDER BY clause of a select. dialect-specific compilers # can modify this behavior. 
- render_label_with_as = within_columns_clause and not within_label_clause + render_label_with_as = (within_columns_clause and not + within_label_clause) render_label_only = render_label_as_label is label if render_label_only or render_label_with_as: @@ -509,27 +603,25 @@ def visit_label(self, label, if render_label_with_as: if add_to_result_map is not None: add_to_result_map( - labelname, - label.name, - (label, labelname, ) + label._alt_names, - label.type + labelname, + label.name, + (label, labelname, ) + label._alt_names, + label.type ) - return label.element._compiler_dispatch(self, - within_columns_clause=True, - within_label_clause=True, - **kw) + \ - OPERATORS[operators.as_] + \ - self.preparer.format_label(label, labelname) + return label.element._compiler_dispatch( + self, within_columns_clause=True, + within_label_clause=True, **kw) + \ + OPERATORS[operators.as_] + \ + self.preparer.format_label(label, labelname) elif render_label_only: - return labelname + return self.preparer.format_label(label, labelname) else: - return label.element._compiler_dispatch(self, - within_columns_clause=False, - **kw) + return label.element._compiler_dispatch( + self, within_columns_clause=False, **kw) def visit_column(self, column, add_to_result_map=None, - include_table=True, **kwargs): + include_table=True, **kwargs): name = orig_name = column.name if name is None: raise exc.CompileError("Cannot compile Column object until " @@ -565,8 +657,8 @@ def visit_column(self, column, add_to_result_map=None, tablename = self._truncated_identifier("alias", tablename) return schema_prefix + \ - self.preparer.quote(tablename) + \ - "." + name + self.preparer.quote(tablename) + \ + "." + name def escape_literal_column(self, text): """provide escaping for the literal_column() construct.""" @@ -580,8 +672,9 @@ def visit_fromclause(self, fromclause, **kwargs): def visit_index(self, index, **kwargs): return index.name - def visit_typeclause(self, typeclause, **kwargs): - return self.dialect.type_compiler.process(typeclause.type) + def visit_typeclause(self, typeclause, **kw): + kw['type_expression'] = typeclause + return self.dialect.type_compiler.process(typeclause.type, **kw) def post_process_text(self, text): return text @@ -594,38 +687,42 @@ def do_bindparam(m): else: return self.bindparam_string(name, **kw) + if not self.stack: + self.isplaintext = True + # un-escape any \:params - return BIND_PARAMS_ESC.sub(lambda m: m.group(1), - BIND_PARAMS.sub(do_bindparam, - self.post_process_text(textclause.text)) + return BIND_PARAMS_ESC.sub( + lambda m: m.group(1), + BIND_PARAMS.sub( + do_bindparam, + self.post_process_text(textclause.text)) ) - def visit_text_as_from(self, taf, iswrapper=False, - compound_index=0, force_result_map=False, - asfrom=False, - parens=True, **kw): + def visit_text_as_from(self, taf, + compound_index=None, + asfrom=False, + parens=True, **kw): toplevel = not self.stack entry = self._default_stack_entry if toplevel else self.stack[-1] - populate_result_map = force_result_map or ( - compound_index == 0 and ( - toplevel or \ - entry['iswrapper'] - ) - ) + populate_result_map = toplevel or \ + ( + compound_index == 0 and entry.get( + 'need_result_map_for_compound', False) + ) or entry.get('need_result_map_for_nested', False) if populate_result_map: + self._ordered_columns = False for c in taf.column_args: self.process(c, within_columns_clause=True, - add_to_result_map=self._add_to_result_map) + add_to_result_map=self._add_to_result_map) text = self.process(taf.element, **kw) if asfrom and 
parens: text = "(%s)" % text return text - def visit_null(self, expr, **kw): return 'NULL' @@ -641,45 +738,18 @@ def visit_false(self, expr, **kw): else: return "0" - def visit_clauselist(self, clauselist, order_by_select=None, **kw): - if order_by_select is not None: - return self._order_by_clauselist( - clauselist, order_by_select, **kw) - + def visit_clauselist(self, clauselist, **kw): sep = clauselist.operator if sep is None: sep = " " else: sep = OPERATORS[clauselist.operator] return sep.join( - s for s in - ( - c._compiler_dispatch(self, **kw) - for c in clauselist.clauses) - if s) - - def _order_by_clauselist(self, clauselist, order_by_select, **kw): - # look through raw columns collection for labels. - # note that its OK we aren't expanding tables and other selectables - # here; we can only add a label in the ORDER BY for an individual - # label expression in the columns clause. - - raw_col = set(l._order_by_label_element.name - for l in order_by_select._raw_columns - if l._order_by_label_element is not None) - - return ", ".join( - s for s in - ( - c._compiler_dispatch(self, - render_label_as_label= - c._order_by_label_element if - c._order_by_label_element is not None and - c._order_by_label_element.name in raw_col - else None, - **kw) - for c in clauselist.clauses) - if s) + s for s in + ( + c._compiler_dispatch(self, **kw) + for c in clauselist.clauses) + if s) def visit_case(self, clause, **kwargs): x = "CASE " @@ -687,38 +757,44 @@ def visit_case(self, clause, **kwargs): x += clause.value._compiler_dispatch(self, **kwargs) + " " for cond, result in clause.whens: x += "WHEN " + cond._compiler_dispatch( - self, **kwargs - ) + " THEN " + result._compiler_dispatch( - self, **kwargs) + " " + self, **kwargs + ) + " THEN " + result._compiler_dispatch( + self, **kwargs) + " " if clause.else_ is not None: x += "ELSE " + clause.else_._compiler_dispatch( - self, **kwargs - ) + " " + self, **kwargs + ) + " " x += "END" return x def visit_cast(self, cast, **kwargs): return "CAST(%s AS %s)" % \ - (cast.clause._compiler_dispatch(self, **kwargs), - cast.typeclause._compiler_dispatch(self, **kwargs)) + (cast.clause._compiler_dispatch(self, **kwargs), + cast.typeclause._compiler_dispatch(self, **kwargs)) def visit_over(self, over, **kwargs): return "%s OVER (%s)" % ( over.func._compiler_dispatch(self, **kwargs), ' '.join( - '%s BY %s' % (word, clause._compiler_dispatch(self, **kwargs)) - for word, clause in ( - ('PARTITION', over.partition_by), - ('ORDER', over.order_by) - ) - if clause is not None and len(clause) + '%s BY %s' % (word, clause._compiler_dispatch(self, **kwargs)) + for word, clause in ( + ('PARTITION', over.partition_by), + ('ORDER', over.order_by) + ) + if clause is not None and len(clause) ) ) + def visit_funcfilter(self, funcfilter, **kwargs): + return "%s FILTER (WHERE %s)" % ( + funcfilter.func._compiler_dispatch(self, **kwargs), + funcfilter.criterion._compiler_dispatch(self, **kwargs) + ) + def visit_extract(self, extract, **kwargs): field = self.extract_map.get(extract.field, extract.field) - return "EXTRACT(%s FROM %s)" % (field, - extract.expr._compiler_dispatch(self, **kwargs)) + return "EXTRACT(%s FROM %s)" % ( + field, extract.expr._compiler_dispatch(self, **kwargs)) def visit_function(self, func, add_to_result_map=None, **kwargs): if add_to_result_map is not None: @@ -732,7 +808,7 @@ def visit_function(self, func, add_to_result_map=None, **kwargs): else: name = FUNCTIONS.get(func.__class__, func.name + "%(expr)s") return ".".join(list(func.packagenames) + [name]) % 
\ - {'expr': self.function_argspec(func, **kwargs)} + {'expr': self.function_argspec(func, **kwargs)} def visit_next_value_func(self, next_value, **kw): return self.visit_sequence(next_value.sequence) @@ -746,39 +822,42 @@ def visit_sequence(self, sequence): def function_argspec(self, func, **kwargs): return func.clause_expr._compiler_dispatch(self, **kwargs) - def visit_compound_select(self, cs, asfrom=False, - parens=True, compound_index=0, **kwargs): + parens=True, compound_index=0, **kwargs): toplevel = not self.stack entry = self._default_stack_entry if toplevel else self.stack[-1] + need_result_map = toplevel or \ + (compound_index == 0 + and entry.get('need_result_map_for_compound', False)) self.stack.append( - { - 'correlate_froms': entry['correlate_froms'], - 'iswrapper': toplevel, - 'asfrom_froms': entry['asfrom_froms'] - }) + { + 'correlate_froms': entry['correlate_froms'], + 'asfrom_froms': entry['asfrom_froms'], + 'selectable': cs, + 'need_result_map_for_compound': need_result_map + }) keyword = self.compound_keywords.get(cs.keyword) text = (" " + keyword + " ").join( - (c._compiler_dispatch(self, - asfrom=asfrom, parens=False, - compound_index=i, **kwargs) - for i, c in enumerate(cs.selects)) - ) + (c._compiler_dispatch(self, + asfrom=asfrom, parens=False, + compound_index=i, **kwargs) + for i, c in enumerate(cs.selects)) + ) group_by = cs._group_by_clause._compiler_dispatch( - self, asfrom=asfrom, **kwargs) + self, asfrom=asfrom, **kwargs) if group_by: text += " GROUP BY " + group_by text += self.order_by_clause(cs, **kwargs) - text += (cs._limit is not None or cs._offset is not None) and \ - self.limit_clause(cs) or "" + text += (cs._limit_clause is not None + or cs._offset_clause is not None) and \ + self.limit_clause(cs, **kwargs) or "" - if self.ctes and \ - compound_index == 0 and toplevel: + if self.ctes and toplevel: text = self._render_cte_clause() + text self.stack.pop(-1) @@ -791,26 +870,26 @@ def visit_unary(self, unary, **kw): if unary.operator: if unary.modifier: raise exc.CompileError( - "Unary expression does not support operator " - "and modifier simultaneously") + "Unary expression does not support operator " + "and modifier simultaneously") disp = getattr(self, "visit_%s_unary_operator" % - unary.operator.__name__, None) + unary.operator.__name__, None) if disp: return disp(unary, unary.operator, **kw) else: - return self._generate_generic_unary_operator(unary, - OPERATORS[unary.operator], **kw) + return self._generate_generic_unary_operator( + unary, OPERATORS[unary.operator], **kw) elif unary.modifier: disp = getattr(self, "visit_%s_unary_modifier" % - unary.modifier.__name__, None) + unary.modifier.__name__, None) if disp: return disp(unary, unary.modifier, **kw) else: - return self._generate_generic_unary_modifier(unary, - OPERATORS[unary.modifier], **kw) + return self._generate_generic_unary_modifier( + unary, OPERATORS[unary.modifier], **kw) else: raise exc.CompileError( - "Unary expression has no operator or modifier") + "Unary expression has no operator or modifier") def visit_istrue_unary_operator(self, element, operator, **kw): if self.dialect.supports_native_boolean: @@ -824,41 +903,45 @@ def visit_isfalse_unary_operator(self, element, operator, **kw): else: return "%s = 0" % self.process(element.element, **kw) - def visit_binary(self, binary, **kw): + def visit_notmatch_op_binary(self, binary, operator, **kw): + return "NOT %s" % self.visit_binary( + binary, override_operator=operators.match_op) + + def visit_binary(self, binary, 
override_operator=None, **kw): # don't allow "? = ?" to render if self.ansi_bind_rules and \ - isinstance(binary.left, elements.BindParameter) and \ - isinstance(binary.right, elements.BindParameter): + isinstance(binary.left, elements.BindParameter) and \ + isinstance(binary.right, elements.BindParameter): kw['literal_binds'] = True - operator = binary.operator - disp = getattr(self, "visit_%s_binary" % operator.__name__, None) + operator_ = override_operator or binary.operator + disp = getattr(self, "visit_%s_binary" % operator_.__name__, None) if disp: - return disp(binary, operator, **kw) + return disp(binary, operator_, **kw) else: try: - opstring = OPERATORS[operator] + opstring = OPERATORS[operator_] except KeyError: - raise exc.UnsupportedCompilationError(self, operator) + raise exc.UnsupportedCompilationError(self, operator_) else: return self._generate_generic_binary(binary, opstring, **kw) def visit_custom_op_binary(self, element, operator, **kw): - return self._generate_generic_binary(element, - " " + operator.opstring + " ", **kw) + return self._generate_generic_binary( + element, " " + operator.opstring + " ", **kw) def visit_custom_op_unary_operator(self, element, operator, **kw): - return self._generate_generic_unary_operator(element, - operator.opstring + " ", **kw) + return self._generate_generic_unary_operator( + element, operator.opstring + " ", **kw) def visit_custom_op_unary_modifier(self, element, operator, **kw): - return self._generate_generic_unary_modifier(element, - " " + operator.opstring, **kw) + return self._generate_generic_unary_modifier( + element, " " + operator.opstring, **kw) def _generate_generic_binary(self, binary, opstring, **kw): return binary.left._compiler_dispatch(self, **kw) + \ - opstring + \ - binary.right._compiler_dispatch(self, **kw) + opstring + \ + binary.right._compiler_dispatch(self, **kw) def _generate_generic_unary_operator(self, unary, opstring, **kw): return opstring + unary.element._compiler_dispatch(self, **kw) @@ -886,16 +969,16 @@ def visit_startswith_op_binary(self, binary, operator, **kw): binary = binary._clone() percent = self._like_percent_literal binary.right = percent.__radd__( - binary.right - ) + binary.right + ) return self.visit_like_op_binary(binary, operator, **kw) def visit_notstartswith_op_binary(self, binary, operator, **kw): binary = binary._clone() percent = self._like_percent_literal binary.right = percent.__radd__( - binary.right - ) + binary.right + ) return self.visit_notlike_op_binary(binary, operator, **kw) def visit_endswith_op_binary(self, binary, operator, **kw): @@ -915,8 +998,8 @@ def visit_like_op_binary(self, binary, operator, **kw): # TODO: use ternary here, not "and"/ "or" return '%s LIKE %s' % ( - binary.left._compiler_dispatch(self, **kw), - binary.right._compiler_dispatch(self, **kw)) \ + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) \ + ( ' ESCAPE ' + self.render_literal_value(escape, sqltypes.STRINGTYPE) @@ -926,8 +1009,8 @@ def visit_like_op_binary(self, binary, operator, **kw): def visit_notlike_op_binary(self, binary, operator, **kw): escape = binary.modifiers.get("escape", None) return '%s NOT LIKE %s' % ( - binary.left._compiler_dispatch(self, **kw), - binary.right._compiler_dispatch(self, **kw)) \ + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) \ + ( ' ESCAPE ' + self.render_literal_value(escape, sqltypes.STRINGTYPE) @@ -937,8 +1020,8 @@ def visit_notlike_op_binary(self, binary, operator, **kw): 
def visit_ilike_op_binary(self, binary, operator, **kw): escape = binary.modifiers.get("escape", None) return 'lower(%s) LIKE lower(%s)' % ( - binary.left._compiler_dispatch(self, **kw), - binary.right._compiler_dispatch(self, **kw)) \ + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) \ + ( ' ESCAPE ' + self.render_literal_value(escape, sqltypes.STRINGTYPE) @@ -948,32 +1031,44 @@ def visit_ilike_op_binary(self, binary, operator, **kw): def visit_notilike_op_binary(self, binary, operator, **kw): escape = binary.modifiers.get("escape", None) return 'lower(%s) NOT LIKE lower(%s)' % ( - binary.left._compiler_dispatch(self, **kw), - binary.right._compiler_dispatch(self, **kw)) \ + binary.left._compiler_dispatch(self, **kw), + binary.right._compiler_dispatch(self, **kw)) \ + ( ' ESCAPE ' + self.render_literal_value(escape, sqltypes.STRINGTYPE) if escape else '' ) + def visit_between_op_binary(self, binary, operator, **kw): + symmetric = binary.modifiers.get("symmetric", False) + return self._generate_generic_binary( + binary, " BETWEEN SYMMETRIC " + if symmetric else " BETWEEN ", **kw) + + def visit_notbetween_op_binary(self, binary, operator, **kw): + symmetric = binary.modifiers.get("symmetric", False) + return self._generate_generic_binary( + binary, " NOT BETWEEN SYMMETRIC " + if symmetric else " NOT BETWEEN ", **kw) + def visit_bindparam(self, bindparam, within_columns_clause=False, - literal_binds=False, - skip_bind_expression=False, - **kwargs): + literal_binds=False, + skip_bind_expression=False, + **kwargs): if not skip_bind_expression and bindparam.type._has_bind_expression: bind_expression = bindparam.type.bind_expression(bindparam) return self.process(bind_expression, skip_bind_expression=True) if literal_binds or \ - (within_columns_clause and \ + (within_columns_clause and self.ansi_bind_rules): if bindparam.value is None and bindparam.callable is None: raise exc.CompileError("Bind parameter '%s' without a " - "renderable value not allowed here." - % bindparam.key) - return self.render_literal_bindparam(bindparam, - within_columns_clause=True, **kwargs) + "renderable value not allowed here." + % bindparam.key) + return self.render_literal_bindparam( + bindparam, within_columns_clause=True, **kwargs) name = self._truncate_bindparam(bindparam) @@ -981,13 +1076,13 @@ def visit_bindparam(self, bindparam, within_columns_clause=False, existing = self.binds[name] if existing is not bindparam: if (existing.unique or bindparam.unique) and \ - not existing.proxy_set.intersection( - bindparam.proxy_set): + not existing.proxy_set.intersection( + bindparam.proxy_set): raise exc.CompileError( - "Bind parameter '%s' conflicts with " - "unique bind parameter of the same name" % - bindparam.key - ) + "Bind parameter '%s' conflicts with " + "unique bind parameter of the same name" % + bindparam.key + ) elif existing._is_crud or bindparam._is_crud: raise exc.CompileError( "bindparam() name '%s' is reserved " @@ -995,8 +1090,8 @@ def visit_bindparam(self, bindparam, within_columns_clause=False, "clause of this " "insert/update statement. Please use a " "name other than column name when using bindparam() " - "with insert() or update() (for example, 'b_%s')." - % (bindparam.key, bindparam.key) + "with insert() or update() (for example, 'b_%s')." 
% + (bindparam.key, bindparam.key) ) self.binds[bindparam.key] = self.binds[name] = bindparam @@ -1023,7 +1118,7 @@ def render_literal_value(self, value, type_): return processor(value) else: raise NotImplementedError( - "Don't know how to literal-quote value %r" % value) + "Don't know how to literal-quote value %r" % value) def _truncate_bindparam(self, bindparam): if bindparam in self.bind_names: @@ -1044,10 +1139,10 @@ def _truncated_identifier(self, ident_class, name): anonname = name.apply_map(self.anon_map) - if len(anonname) > self.label_length: + if len(anonname) > self.label_length - 6: counter = self.truncated_names.get(ident_class, 1) truncname = anonname[0:max(self.label_length - 6, 0)] + \ - "_" + hex(counter)[2:] + "_" + hex(counter)[2:] self.truncated_names[ident_class] = counter + 1 else: truncname = anonname @@ -1072,11 +1167,9 @@ def bindparam_string(self, name, positional_names=None, **kw): return self.bindtemplate % {'name': name} def visit_cte(self, cte, asfrom=False, ashint=False, - fromhints=None, - **kwargs): + fromhints=None, + **kwargs): self._init_cte_state() - if self.positional: - kwargs['positional_names'] = self.cte_positional if isinstance(cte.name, elements._truncated_label): cte_name = self._truncated_identifier("alias", cte.name) @@ -1096,19 +1189,20 @@ def visit_cte(self, cte, asfrom=False, ashint=False, del self.ctes[existing_cte] else: raise exc.CompileError( - "Multiple, unrelated CTEs found with " - "the same name: %r" % - cte_name) + "Multiple, unrelated CTEs found with " + "the same name: %r" % + cte_name) self.ctes_by_name[cte_name] = cte if cte._cte_alias is not None: orig_cte = cte._cte_alias if orig_cte not in self.ctes: - self.visit_cte(orig_cte) + self.visit_cte(orig_cte, **kwargs) cte_alias_name = cte._cte_alias.name if isinstance(cte_alias_name, elements._truncated_label): - cte_alias_name = self._truncated_identifier("alias", cte_alias_name) + cte_alias_name = self._truncated_identifier( + "alias", cte_alias_name) else: orig_cte = cte cte_alias_name = None @@ -1124,29 +1218,38 @@ def visit_cte(self, cte, asfrom=False, ashint=False, else: assert False recur_cols = [c for c in - util.unique_list(col_source.inner_columns) - if c is not None] + util.unique_list(col_source.inner_columns) + if c is not None] text += "(%s)" % (", ".join( - self.preparer.format_column(ident) - for ident in recur_cols)) + self.preparer.format_column(ident) + for ident in recur_cols)) + + if self.positional: + kwargs['positional_names'] = self.cte_positional[cte] = [] + text += " AS \n" + \ - cte.original._compiler_dispatch( - self, asfrom=True, **kwargs - ) + cte.original._compiler_dispatch( + self, asfrom=True, **kwargs + ) + + if cte._suffixes: + text += " " + self._generate_prefixes( + cte, cte._suffixes, **kwargs) + self.ctes[cte] = text if asfrom: if cte_alias_name: text = self.preparer.format_alias(cte, cte_alias_name) - text += " AS " + cte_name + text += self.get_render_as_alias_suffix(cte_name) else: return self.preparer.format_alias(cte, cte_name) return text def visit_alias(self, alias, asfrom=False, ashint=False, - iscrud=False, - fromhints=None, **kwargs): + iscrud=False, + fromhints=None, **kwargs): if asfrom or ashint: if isinstance(alias.name, elements._truncated_label): alias_name = self._truncated_identifier("alias", alias.name) @@ -1157,46 +1260,38 @@ def visit_alias(self, alias, asfrom=False, ashint=False, return self.preparer.format_alias(alias, alias_name) elif asfrom: ret = alias.original._compiler_dispatch(self, - asfrom=True, **kwargs) + \ 
- " AS " + \ - self.preparer.format_alias(alias, alias_name) + asfrom=True, **kwargs) + \ + self.get_render_as_alias_suffix( + self.preparer.format_alias(alias, alias_name)) if fromhints and alias in fromhints: ret = self.format_from_hint_text(ret, alias, - fromhints[alias], iscrud) + fromhints[alias], iscrud) return ret else: return alias.original._compiler_dispatch(self, **kwargs) + def get_render_as_alias_suffix(self, alias_name_text): + return " AS " + alias_name_text + def _add_to_result_map(self, keyname, name, objects, type_): - if not self.dialect.case_sensitive: - keyname = keyname.lower() - - if keyname in self.result_map: - # conflicting keyname, just double up the list - # of objects. this will cause an "ambiguous name" - # error if an attempt is made by the result set to - # access. - e_name, e_obj, e_type = self.result_map[keyname] - self.result_map[keyname] = e_name, e_obj + objects, e_type - else: - self.result_map[keyname] = name, objects, type_ + self._result_columns.append((keyname, name, objects, type_)) def _label_select_column(self, select, column, - populate_result_map, - asfrom, column_clause_args, - name=None, - within_columns_clause=True): + populate_result_map, + asfrom, column_clause_args, + name=None, + within_columns_clause=True): """produce labeled columns present in a select().""" if column.type._has_column_expression and \ populate_result_map: col_expr = column.type.column_expression(column) add_to_result_map = lambda keyname, name, objects, type_: \ - self._add_to_result_map( - keyname, name, - objects + (column,), type_) + self._add_to_result_map( + keyname, name, + objects + (column,), type_) else: col_expr = column if populate_result_map: @@ -1209,19 +1304,19 @@ def _label_select_column(self, select, column, elif isinstance(column, elements.Label): if col_expr is not column: result_expr = _CompileLabel( - col_expr, - column.name, - alt_names=(column.element,) - ) + col_expr, + column.name, + alt_names=(column.element,) + ) else: result_expr = col_expr elif select is not None and name: result_expr = _CompileLabel( - col_expr, - name, - alt_names=(column._key_label,) - ) + col_expr, + name, + alt_names=(column._key_label,) + ) elif \ asfrom and \ @@ -1230,30 +1325,37 @@ def _label_select_column(self, select, column, column.table is not None and \ not isinstance(column.table, selectable.Select): result_expr = _CompileLabel(col_expr, - elements._as_truncated(column.name), - alt_names=(column.key,)) - elif not isinstance(column, - (elements.UnaryExpression, elements.TextClause)) \ - and (not hasattr(column, 'name') or \ - isinstance(column, functions.Function)): + elements._as_truncated(column.name), + alt_names=(column.key,)) + elif ( + not isinstance(column, elements.TextClause) and + ( + not isinstance(column, elements.UnaryExpression) or + column.wraps_column_expression + ) and + ( + not hasattr(column, 'name') or + isinstance(column, functions.Function) + ) + ): result_expr = _CompileLabel(col_expr, column.anon_label) elif col_expr is not column: # TODO: are we sure "column" has a .name and .key here ? 
# assert isinstance(column, elements.ColumnClause) result_expr = _CompileLabel(col_expr, - elements._as_truncated(column.name), - alt_names=(column.key,)) + elements._as_truncated(column.name), + alt_names=(column.key,)) else: result_expr = col_expr column_clause_args.update( - within_columns_clause=within_columns_clause, - add_to_result_map=add_to_result_map - ) + within_columns_clause=within_columns_clause, + add_to_result_map=add_to_result_map + ) return result_expr._compiler_dispatch( - self, - **column_clause_args - ) + self, + **column_clause_args + ) def format_from_hint_text(self, sqltext, table, hint, iscrud): hinttext = self.get_from_hint_text(table, hint) @@ -1270,6 +1372,9 @@ def get_from_hint_text(self, table, text): def get_crud_hint_text(self, table, text): return None + def get_statement_hint_text(self, hint_texts): + return " ".join(hint_texts) + def _transform_select_for_nested_joins(self, select): """Rewrite any "a JOIN (b JOIN c)" expression as "a JOIN (select * from b JOIN c) AS anon", to support @@ -1280,7 +1385,6 @@ def _transform_select_for_nested_joins(self, select): cloned = {} column_translate = [{}] - def visit(element, **kw): if element in column_translate[-1]: return column_translate[-1][element] @@ -1291,7 +1395,7 @@ def visit(element, **kw): newelem = cloned[element] = element._clone() if newelem.is_selectable and newelem._is_join and \ - isinstance(newelem.right, selectable.FromGrouping): + isinstance(newelem.right, selectable.FromGrouping): newelem._reset_exported() newelem.left = visit(newelem.left, **kw) @@ -1299,8 +1403,9 @@ def visit(element, **kw): right = visit(newelem.right, **kw) selectable_ = selectable.Select( - [right.element], - use_labels=True).alias() + [right.element], + use_labels=True).alias() + for c in selectable_.c: c._key_label = c.key c._label = c.name @@ -1335,14 +1440,16 @@ def visit(element, **kw): newelem.onclause = visit(newelem.onclause, **kw) - elif newelem.is_selectable and newelem._is_from_container: - # if we hit an Alias or CompoundSelect, put a marker in the - # stack. + elif newelem._is_from_container: + # if we hit an Alias, CompoundSelect or ScalarSelect, put a + # marker in the stack. 
kw['transform_clue'] = 'select_container' newelem._copy_internals(clone=visit, **kw) elif newelem.is_selectable and newelem._is_select: - barrier_select = kw.get('transform_clue', None) == 'select_container' - # if we're still descended from an Alias/CompoundSelect, we're + barrier_select = kw.get('transform_clue', None) == \ + 'select_container' + # if we're still descended from an + # Alias/CompoundSelect/ScalarSelect, we're # in a FROM clause, so start with a new translate collection if barrier_select: column_translate.append({}) @@ -1357,24 +1464,25 @@ def visit(element, **kw): return visit(select) - def _transform_result_map_for_nested_joins(self, select, transformed_select): + def _transform_result_map_for_nested_joins( + self, select, transformed_select): inner_col = dict((c._key_label, c) for - c in transformed_select.inner_columns) + c in transformed_select.inner_columns) d = dict( - (inner_col[c._key_label], c) - for c in select.inner_columns - ) - for key, (name, objs, typ) in list(self.result_map.items()): - objs = tuple([d.get(col, col) for col in objs]) - self.result_map[key] = (name, objs, typ) + (inner_col[c._key_label], c) + for c in select.inner_columns + ) + self._result_columns = [ + (key, name, tuple([d.get(col, col) for col in objs]), typ) + for key, name, objs, typ in self._result_columns + ] _default_stack_entry = util.immutabledict([ - ('iswrapper', False), - ('correlate_froms', frozenset()), - ('asfrom_froms', frozenset()) - ]) + ('correlate_froms', frozenset()), + ('asfrom_froms', frozenset()) + ]) def _display_froms_for_select(self, select, asfrom): # utility method to help external dialects @@ -1389,134 +1497,205 @@ def _display_froms_for_select(self, select, asfrom): if asfrom: froms = select._get_display_froms( - explicit_correlate_froms=\ - correlate_froms.difference(asfrom_froms), - implicit_correlate_froms=()) + explicit_correlate_froms=correlate_froms.difference( + asfrom_froms), + implicit_correlate_froms=()) else: froms = select._get_display_froms( - explicit_correlate_froms=correlate_froms, - implicit_correlate_froms=asfrom_froms) + explicit_correlate_froms=correlate_froms, + implicit_correlate_froms=asfrom_froms) return froms def visit_select(self, select, asfrom=False, parens=True, - iswrapper=False, fromhints=None, - compound_index=0, - force_result_map=False, - positional_names=None, - nested_join_translation=False, - **kwargs): + fromhints=None, + compound_index=0, + nested_join_translation=False, + select_wraps_for=None, + **kwargs): needs_nested_translation = \ - select.use_labels and \ - not nested_join_translation and \ - not self.stack and \ - not self.dialect.supports_right_nested_joins + select.use_labels and \ + not nested_join_translation and \ + not self.stack and \ + not self.dialect.supports_right_nested_joins if needs_nested_translation: - transformed_select = self._transform_select_for_nested_joins(select) + transformed_select = self._transform_select_for_nested_joins( + select) text = self.visit_select( - transformed_select, asfrom=asfrom, parens=parens, - iswrapper=iswrapper, fromhints=fromhints, - compound_index=compound_index, - force_result_map=force_result_map, - positional_names=positional_names, - nested_join_translation=True, **kwargs - ) + transformed_select, asfrom=asfrom, parens=parens, + fromhints=fromhints, + compound_index=compound_index, + nested_join_translation=True, **kwargs + ) toplevel = not self.stack entry = self._default_stack_entry if toplevel else self.stack[-1] + populate_result_map = toplevel or \ + ( + 
compound_index == 0 and entry.get( + 'need_result_map_for_compound', False) + ) or entry.get('need_result_map_for_nested', False) - populate_result_map = force_result_map or ( - compound_index == 0 and ( - toplevel or \ - entry['iswrapper'] - ) - ) + # this was first proposed as part of #3372; however, it is not + # reached in current tests and could possibly be an assertion + # instead. + if not populate_result_map and 'add_to_result_map' in kwargs: + del kwargs['add_to_result_map'] if needs_nested_translation: if populate_result_map: self._transform_result_map_for_nested_joins( - select, transformed_select) + select, transformed_select) return text - correlate_froms = entry['correlate_froms'] - asfrom_froms = entry['asfrom_froms'] + froms = self._setup_select_stack(select, entry, asfrom) - if asfrom: - froms = select._get_display_froms( - explicit_correlate_froms= - correlate_froms.difference(asfrom_froms), - implicit_correlate_froms=()) - else: - froms = select._get_display_froms( - explicit_correlate_froms=correlate_froms, - implicit_correlate_froms=asfrom_froms) + column_clause_args = kwargs.copy() + column_clause_args.update({ + 'within_label_clause': False, + 'within_columns_clause': False + }) - new_correlate_froms = set(selectable._from_objects(*froms)) - all_correlate_froms = new_correlate_froms.union(correlate_froms) + text = "SELECT " # we're off to a good start ! - new_entry = { - 'asfrom_froms': new_correlate_froms, - 'iswrapper': iswrapper, - 'correlate_froms': all_correlate_froms - } - self.stack.append(new_entry) + if select._hints: + hint_text, byfrom = self._setup_select_hints(select) + if hint_text: + text += hint_text + " " + else: + byfrom = None - column_clause_args = kwargs.copy() - column_clause_args.update({ - 'positional_names': positional_names, - 'within_label_clause': False, - 'within_columns_clause': False - }) + if select._prefixes: + text += self._generate_prefixes( + select, select._prefixes, **kwargs) + text += self.get_select_precolumns(select, **kwargs) # the actual list of columns to print in the SELECT column list. inner_columns = [ c for c in [ - self._label_select_column(select, - column, - populate_result_map, asfrom, - column_clause_args, - name=name) + self._label_select_column( + select, + column, + populate_result_map, asfrom, + column_clause_args, + name=name) for name, column in select._columns_plus_names - ] + ] if c is not None ] - text = "SELECT " # we're off to a good start ! 
+ if populate_result_map and select_wraps_for is not None: + # if this select is a compiler-generated wrapper, + # rewrite the targeted columns in the result map - if select._hints: - byfrom = dict([ - (from_, hinttext % { - 'name':from_._compiler_dispatch( - self, ashint=True) - }) - for (from_, dialect), hinttext in - select._hints.items() - if dialect in ('*', self.dialect.name) - ]) - hint_text = self.get_select_hint_text(byfrom) - if hint_text: - text += hint_text + " " + translate = dict( + zip( + [name for (key, name) in select._columns_plus_names], + [name for (key, name) in + select_wraps_for._columns_plus_names]) + ) - if select._prefixes: - text += self._generate_prefixes(select, select._prefixes, **kwargs) + self._result_columns = [ + (key, name, tuple(translate.get(o, o) for o in obj), type_) + for key, name, obj, type_ in self._result_columns + ] + + text = self._compose_select_body( + text, select, inner_columns, froms, byfrom, kwargs) + + if select._statement_hints: + per_dialect = [ + ht for (dialect_name, ht) + in select._statement_hints + if dialect_name in ('*', self.dialect.name) + ] + if per_dialect: + text += " " + self.get_statement_hint_text(per_dialect) - text += self.get_select_precolumns(select) + if self.ctes and self._is_toplevel_select(select): + text = self._render_cte_clause() + text + + if select._suffixes: + text += " " + self._generate_prefixes( + select, select._suffixes, **kwargs) + + self.stack.pop(-1) + + if asfrom and parens: + return "(" + text + ")" + else: + return text + + def _is_toplevel_select(self, select): + """Return True if the stack is placed at the given select, and + is also the outermost SELECT, meaning there is either no stack + before this one, or the enclosing stack is a topmost INSERT. + + """ + return ( + self.stack[-1]['selectable'] is select and + ( + len(self.stack) == 1 or self.isinsert and len(self.stack) == 2 + and self.statement is self.stack[0]['selectable'] + ) + ) + + def _setup_select_hints(self, select): + byfrom = dict([ + (from_, hinttext % { + 'name': from_._compiler_dispatch( + self, ashint=True) + }) + for (from_, dialect), hinttext in + select._hints.items() + if dialect in ('*', self.dialect.name) + ]) + hint_text = self.get_select_hint_text(byfrom) + return hint_text, byfrom + + def _setup_select_stack(self, select, entry, asfrom): + correlate_froms = entry['correlate_froms'] + asfrom_froms = entry['asfrom_froms'] + + if asfrom: + froms = select._get_display_froms( + explicit_correlate_froms=correlate_froms.difference( + asfrom_froms), + implicit_correlate_froms=()) + else: + froms = select._get_display_froms( + explicit_correlate_froms=correlate_froms, + implicit_correlate_froms=asfrom_froms) + + new_correlate_froms = set(selectable._from_objects(*froms)) + all_correlate_froms = new_correlate_froms.union(correlate_froms) + + new_entry = { + 'asfrom_froms': new_correlate_froms, + 'correlate_froms': all_correlate_froms, + 'selectable': select, + } + self.stack.append(new_entry) + return froms + + def _compose_select_body( + self, text, select, inner_columns, froms, byfrom, kwargs): text += ', '.join(inner_columns) if froms: text += " \nFROM " if select._hints: - text += ', '.join([f._compiler_dispatch(self, - asfrom=True, fromhints=byfrom, - **kwargs) - for f in froms]) + text += ', '.join( + [f._compiler_dispatch(self, asfrom=True, + fromhints=byfrom, **kwargs) + for f in froms]) else: - text += ', '.join([f._compiler_dispatch(self, - asfrom=True, **kwargs) - for f in froms]) + text += ', '.join( + 
[f._compiler_dispatch(self, asfrom=True, **kwargs) + for f in froms]) else: text += self.default_from() @@ -1527,7 +1706,7 @@ def visit_select(self, select, asfrom=False, parens=True, if select._group_by_clause.clauses: group_by = select._group_by_clause._compiler_dispatch( - self, **kwargs) + self, **kwargs) if group_by: text += " GROUP BY " + group_by @@ -1537,45 +1716,34 @@ def visit_select(self, select, asfrom=False, parens=True, text += " \nHAVING " + t if select._order_by_clause.clauses: - if self.dialect.supports_simple_order_by_label: - order_by_select = select - else: - order_by_select = None - - text += self.order_by_clause(select, - order_by_select=order_by_select, **kwargs) + text += self.order_by_clause(select, **kwargs) - if select._limit is not None or select._offset is not None: - text += self.limit_clause(select) + if (select._limit_clause is not None or + select._offset_clause is not None): + text += self.limit_clause(select, **kwargs) if select._for_update_arg is not None: - text += self.for_update_clause(select) + text += self.for_update_clause(select, **kwargs) - if self.ctes and \ - compound_index == 0 and toplevel: - text = self._render_cte_clause() + text - - self.stack.pop(-1) - - if asfrom and parens: - return "(" + text + ")" - else: - return text + return text def _generate_prefixes(self, stmt, prefixes, **kw): clause = " ".join( - prefix._compiler_dispatch(self, **kw) - for prefix, dialect_name in prefixes - if dialect_name is None or - dialect_name == self.dialect.name - ) + prefix._compiler_dispatch(self, **kw) + for prefix, dialect_name in prefixes + if dialect_name is None or + dialect_name == self.dialect.name + ) if clause: clause += " " return clause def _render_cte_clause(self): if self.positional: - self.positiontup = self.cte_positional + self.positiontup + self.positiontup = sum([ + self.cte_positional[cte] + for cte in self.ctes], []) + \ + self.positiontup cte_text = self.get_cte_preamble(self.ctes_recursive) + " " cte_text += ", \n".join( [txt for txt in self.ctes.values()] @@ -1589,7 +1757,7 @@ def get_cte_preamble(self, recursive): else: return "WITH" - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): """Called when building a ``SELECT`` statement, position is just before column list. 
@@ -1603,35 +1771,35 @@ def order_by_clause(self, select, **kw): else: return "" - def for_update_clause(self, select): + def for_update_clause(self, select, **kw): return " FOR UPDATE" def returning_clause(self, stmt, returning_cols): raise exc.CompileError( - "RETURNING is not supported by this " - "dialect's statement compiler.") + "RETURNING is not supported by this " + "dialect's statement compiler.") - def limit_clause(self, select): + def limit_clause(self, select, **kw): text = "" - if select._limit is not None: - text += "\n LIMIT " + self.process(elements.literal(select._limit)) - if select._offset is not None: - if select._limit is None: + if select._limit_clause is not None: + text += "\n LIMIT " + self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + if select._limit_clause is None: text += "\n LIMIT -1" - text += " OFFSET " + self.process(elements.literal(select._offset)) + text += " OFFSET " + self.process(select._offset_clause, **kw) return text def visit_table(self, table, asfrom=False, iscrud=False, ashint=False, - fromhints=None, **kwargs): + fromhints=None, use_schema=True, **kwargs): if asfrom or ashint: - if getattr(table, "schema", None): + if use_schema and getattr(table, "schema", None): ret = self.preparer.quote_schema(table.schema) + \ - "." + self.preparer.quote(table.name) + "." + self.preparer.quote(table.name) else: ret = self.preparer.quote(table.name) if fromhints and table in fromhints: ret = self.format_from_hint_text(ret, table, - fromhints[table], iscrud) + fromhints[table], iscrud) return ret else: return "" @@ -1646,27 +1814,32 @@ def visit_join(self, join, asfrom=False, **kwargs): ) def visit_insert(self, insert_stmt, **kw): + self.stack.append( + {'correlate_froms': set(), + "asfrom_froms": set(), + "selectable": insert_stmt}) + self.isinsert = True - colparams = self._get_colparams(insert_stmt, **kw) + crud_params = crud._get_crud_params(self, insert_stmt, **kw) - if not colparams and \ + if not crud_params and \ not self.dialect.supports_default_values and \ not self.dialect.supports_empty_insert: raise exc.CompileError("The '%s' dialect with current database " - "version settings does not support empty " - "inserts." % - self.dialect.name) + "version settings does not support empty " + "inserts." % + self.dialect.name) if insert_stmt._has_multi_parameters: if not self.dialect.supports_multivalues_insert: - raise exc.CompileError("The '%s' dialect with current database " - "version settings does not support " - "in-place multirow inserts." % - self.dialect.name) - colparams_single = colparams[0] + raise exc.CompileError( + "The '%s' dialect with current database " + "version settings does not support " + "in-place multirow inserts." 
% + self.dialect.name) + crud_params_single = crud_params[0] else: - colparams_single = colparams - + crud_params_single = crud_params preparer = self.preparer supports_default_values = self.dialect.supports_default_values @@ -1675,7 +1848,7 @@ def visit_insert(self, insert_stmt, **kw): if insert_stmt._prefixes: text += self._generate_prefixes(insert_stmt, - insert_stmt._prefixes, **kw) + insert_stmt._prefixes, **kw) text += "INTO " table_text = preparer.format_table(insert_stmt.table) @@ -1689,46 +1862,48 @@ def visit_insert(self, insert_stmt, **kw): ]) if insert_stmt.table in dialect_hints: table_text = self.format_from_hint_text( - table_text, - insert_stmt.table, - dialect_hints[insert_stmt.table], - True - ) + table_text, + insert_stmt.table, + dialect_hints[insert_stmt.table], + True + ) text += table_text - if colparams_single or not supports_default_values: + if crud_params_single or not supports_default_values: text += " (%s)" % ', '.join([preparer.format_column(c[0]) - for c in colparams_single]) + for c in crud_params_single]) if self.returning or insert_stmt._returning: self.returning = self.returning or insert_stmt._returning returning_clause = self.returning_clause( - insert_stmt, self.returning) + insert_stmt, self.returning) if self.returning_precedes_values: text += " " + returning_clause if insert_stmt.select is not None: - text += " %s" % self.process(insert_stmt.select, **kw) - elif not colparams and supports_default_values: + text += " %s" % self.process(self._insert_from_select, **kw) + elif not crud_params and supports_default_values: text += " DEFAULT VALUES" elif insert_stmt._has_multi_parameters: text += " VALUES %s" % ( - ", ".join( - "(%s)" % ( - ', '.join(c[1] for c in colparam_set) - ) - for colparam_set in colparams - ) - ) + ", ".join( + "(%s)" % ( + ', '.join(c[1] for c in crud_param_set) + ) + for crud_param_set in crud_params + ) + ) else: text += " VALUES (%s)" % \ - ', '.join([c[1] for c in colparams]) + ', '.join([c[1] for c in crud_params]) if self.returning and not self.returning_precedes_values: text += " " + returning_clause + self.stack.pop(-1) + return text def update_limit_clause(self, update_stmt): @@ -1736,7 +1911,7 @@ def update_limit_clause(self, update_stmt): return None def update_tables_clause(self, update_stmt, from_table, - extra_froms, **kw): + extra_froms, **kw): """Provide a hook to override the initial table clause in an UPDATE statement. @@ -1744,12 +1919,12 @@ def update_tables_clause(self, update_stmt, from_table, """ return from_table._compiler_dispatch(self, asfrom=True, - iscrud=True, **kw) + iscrud=True, **kw) def update_from_clause(self, update_stmt, - from_table, extra_froms, - from_hints, - **kw): + from_table, extra_froms, + from_hints, + **kw): """Provide a hook to override the generation of an UPDATE..FROM clause. 
@@ -1757,15 +1932,15 @@ def update_from_clause(self, update_stmt, """ return "FROM " + ', '.join( - t._compiler_dispatch(self, asfrom=True, - fromhints=from_hints, **kw) - for t in extra_froms) + t._compiler_dispatch(self, asfrom=True, + fromhints=from_hints, **kw) + for t in extra_froms) def visit_update(self, update_stmt, **kw): self.stack.append( - {'correlate_froms': set([update_stmt.table]), - "iswrapper": False, - "asfrom_froms": set([update_stmt.table])}) + {'correlate_froms': set([update_stmt.table]), + "asfrom_froms": set([update_stmt.table]), + "selectable": update_stmt}) self.isupdate = True @@ -1775,12 +1950,12 @@ def visit_update(self, update_stmt, **kw): if update_stmt._prefixes: text += self._generate_prefixes(update_stmt, - update_stmt._prefixes, **kw) + update_stmt._prefixes, **kw) table_text = self.update_tables_clause(update_stmt, update_stmt.table, extra_froms, **kw) - colparams = self._get_colparams(update_stmt, **kw) + crud_params = crud._get_crud_params(self, update_stmt, **kw) if update_stmt._hints: dialect_hints = dict([ @@ -1791,11 +1966,11 @@ def visit_update(self, update_stmt, **kw): ]) if update_stmt.table in dialect_hints: table_text = self.format_from_hint_text( - table_text, - update_stmt.table, - dialect_hints[update_stmt.table], - True - ) + table_text, + update_stmt.table, + dialect_hints[update_stmt.table], + True + ) else: dialect_hints = None @@ -1803,31 +1978,33 @@ def visit_update(self, update_stmt, **kw): text += ' SET ' include_table = extra_froms and \ - self.render_table_with_column_in_update_from + self.render_table_with_column_in_update_from text += ', '.join( - c[0]._compiler_dispatch(self, - include_table=include_table) + - '=' + c[1] for c in colparams - ) + c[0]._compiler_dispatch(self, + include_table=include_table) + + '=' + c[1] for c in crud_params + ) if self.returning or update_stmt._returning: if not self.returning: self.returning = update_stmt._returning if self.returning_precedes_values: text += " " + self.returning_clause( - update_stmt, self.returning) + update_stmt, self.returning) if extra_froms: extra_from_text = self.update_from_clause( - update_stmt, - update_stmt.table, - extra_froms, - dialect_hints, **kw) + update_stmt, + update_stmt.table, + extra_froms, + dialect_hints, **kw) if extra_from_text: text += " " + extra_from_text if update_stmt._whereclause is not None: - text += " WHERE " + self.process(update_stmt._whereclause) + t = self.process(update_stmt._whereclause, **kw) + if t: + text += " WHERE " + t limit_clause = self.update_limit_clause(update_stmt) if limit_clause: @@ -1835,392 +2012,31 @@ def visit_update(self, update_stmt, **kw): if self.returning and not self.returning_precedes_values: text += " " + self.returning_clause( - update_stmt, self.returning) + update_stmt, self.returning) self.stack.pop(-1) return text - def _create_crud_bind_param(self, col, value, required=False, name=None): - if name is None: - name = col.key - bindparam = elements.BindParameter(name, value, - type_=col.type, required=required) - bindparam._is_crud = True - return bindparam._compiler_dispatch(self) - @util.memoized_property def _key_getters_for_crud_column(self): - if self.isupdate and self.statement._extra_froms: - # when extra tables are present, refer to the columns - # in those extra tables as table-qualified, including in - # dictionaries and when rendering bind param names. - # the "main" table of the statement remains unqualified, - # allowing the most compatibility with a non-multi-table - # statement. 
- _et = set(self.statement._extra_froms) - def _column_as_key(key): - str_key = elements._column_as_key(key) - if hasattr(key, 'table') and key.table in _et: - return (key.table.name, str_key) - else: - return str_key - def _getattr_col_key(col): - if col.table in _et: - return (col.table.name, col.key) - else: - return col.key - def _col_bind_name(col): - if col.table in _et: - return "%s_%s" % (col.table.name, col.key) - else: - return col.key - - else: - _column_as_key = elements._column_as_key - _getattr_col_key = _col_bind_name = operator.attrgetter("key") - - return _column_as_key, _getattr_col_key, _col_bind_name - - def _get_colparams(self, stmt, **kw): - """create a set of tuples representing column/string pairs for use - in an INSERT or UPDATE statement. - - Also generates the Compiled object's postfetch, prefetch, and - returning column collections, used for default handling and ultimately - populating the ResultProxy's prefetch_cols() and postfetch_cols() - collections. - - """ - - self.postfetch = [] - self.prefetch = [] - self.returning = [] - - # no parameters in the statement, no parameters in the - # compiled params - return binds for all columns - if self.column_keys is None and stmt.parameters is None: - return [ - (c, self._create_crud_bind_param(c, - None, required=True)) - for c in stmt.table.columns - ] - - if stmt._has_multi_parameters: - stmt_parameters = stmt.parameters[0] - else: - stmt_parameters = stmt.parameters - - # getters - these are normally just column.key, - # but in the case of mysql multi-table update, the rules for - # .key must conditionally take tablename into account - _column_as_key, _getattr_col_key, _col_bind_name = \ - self._key_getters_for_crud_column - - # if we have statement parameters - set defaults in the - # compiled params - if self.column_keys is None: - parameters = {} - else: - parameters = dict((_column_as_key(key), REQUIRED) - for key in self.column_keys - if not stmt_parameters or - key not in stmt_parameters) - - # create a list of column assignment clauses as tuples - values = [] - - if stmt_parameters is not None: - for k, v in stmt_parameters.items(): - colkey = _column_as_key(k) - if colkey is not None: - parameters.setdefault(colkey, v) - else: - # a non-Column expression on the left side; - # add it to values() in an "as-is" state, - # coercing right side to bound param - if elements._is_literal(v): - v = self.process( - elements.BindParameter(None, v, type_=k.type), - **kw) - else: - v = self.process(v.self_group(), **kw) - - values.append((k, v)) - - need_pks = self.isinsert and \ - not self.inline and \ - not stmt._returning - - implicit_returning = need_pks and \ - self.dialect.implicit_returning and \ - stmt.table.implicit_returning - - if self.isinsert: - implicit_return_defaults = implicit_returning and stmt._return_defaults - elif self.isupdate: - implicit_return_defaults = self.dialect.implicit_returning and \ - stmt.table.implicit_returning and \ - stmt._return_defaults - - if implicit_return_defaults: - if stmt._return_defaults is True: - implicit_return_defaults = set(stmt.table.c) - else: - implicit_return_defaults = set(stmt._return_defaults) - - postfetch_lastrowid = need_pks and self.dialect.postfetch_lastrowid - - check_columns = {} - - # special logic that only occurs for multi-table UPDATE - # statements - if self.isupdate and stmt._extra_froms and stmt_parameters: - normalized_params = dict( - (elements._clause_element_as_expr(c), param) - for c, param in stmt_parameters.items() - ) - affected_tables = 
set() - for t in stmt._extra_froms: - for c in t.c: - if c in normalized_params: - affected_tables.add(t) - check_columns[_getattr_col_key(c)] = c - value = normalized_params[c] - if elements._is_literal(value): - value = self._create_crud_bind_param( - c, value, required=value is REQUIRED, - name=_col_bind_name(c)) - else: - self.postfetch.append(c) - value = self.process(value.self_group(), **kw) - values.append((c, value)) - # determine tables which are actually - # to be updated - process onupdate and - # server_onupdate for these - for t in affected_tables: - for c in t.c: - if c in normalized_params: - continue - elif c.onupdate is not None and not c.onupdate.is_sequence: - if c.onupdate.is_clause_element: - values.append( - (c, self.process( - c.onupdate.arg.self_group(), - **kw) - ) - ) - self.postfetch.append(c) - else: - values.append( - (c, self._create_crud_bind_param( - c, None, name=_col_bind_name(c) - ) - ) - ) - self.prefetch.append(c) - elif c.server_onupdate is not None: - self.postfetch.append(c) - - if self.isinsert and stmt.select_names: - # for an insert from select, we can only use names that - # are given, so only select for those names. - cols = (stmt.table.c[_column_as_key(name)] - for name in stmt.select_names) - else: - # iterate through all table columns to maintain - # ordering, even for those cols that aren't included - cols = stmt.table.columns - - for c in cols: - col_key = _getattr_col_key(c) - if col_key in parameters and col_key not in check_columns: - value = parameters.pop(col_key) - if elements._is_literal(value): - value = self._create_crud_bind_param( - c, value, required=value is REQUIRED, - name=_col_bind_name(c) - if not stmt._has_multi_parameters - else "%s_0" % _col_bind_name(c) - ) - else: - if isinstance(value, elements.BindParameter) and \ - value.type._isnull: - value = value._clone() - value.type = c.type - - if c.primary_key and implicit_returning: - self.returning.append(c) - value = self.process(value.self_group(), **kw) - elif implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - value = self.process(value.self_group(), **kw) - else: - self.postfetch.append(c) - value = self.process(value.self_group(), **kw) - values.append((c, value)) - - elif self.isinsert: - if c.primary_key and \ - need_pks and \ - ( - implicit_returning or - not postfetch_lastrowid or - c is not stmt.table._autoincrement_column - ): - - if implicit_returning: - if c.default is not None: - if c.default.is_sequence: - if self.dialect.supports_sequences and \ - (not c.default.optional or \ - not self.dialect.sequences_optional): - proc = self.process(c.default, **kw) - values.append((c, proc)) - self.returning.append(c) - elif c.default.is_clause_element: - values.append( - (c, - self.process(c.default.arg.self_group(), **kw)) - ) - self.returning.append(c) - else: - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - self.prefetch.append(c) - else: - self.returning.append(c) - else: - if ( - c.default is not None and - ( - not c.default.is_sequence or - self.dialect.supports_sequences - ) - ) or \ - c is stmt.table._autoincrement_column and ( - self.dialect.supports_sequences or - self.dialect.preexecute_autoincrement_sequences - ): - - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - - self.prefetch.append(c) - - elif c.default is not None: - if c.default.is_sequence: - if self.dialect.supports_sequences and \ - (not c.default.optional or \ - not self.dialect.sequences_optional): - proc = 
self.process(c.default, **kw) - values.append((c, proc)) - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - elif not c.primary_key: - self.postfetch.append(c) - elif c.default.is_clause_element: - values.append( - (c, self.process(c.default.arg.self_group(), **kw)) - ) - - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - elif not c.primary_key: - # dont add primary key column to postfetch - self.postfetch.append(c) - else: - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - self.prefetch.append(c) - elif c.server_default is not None: - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - elif not c.primary_key: - self.postfetch.append(c) - elif implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - - elif self.isupdate: - if c.onupdate is not None and not c.onupdate.is_sequence: - if c.onupdate.is_clause_element: - values.append( - (c, self.process(c.onupdate.arg.self_group(), **kw)) - ) - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - else: - self.postfetch.append(c) - else: - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - self.prefetch.append(c) - elif c.server_onupdate is not None: - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - else: - self.postfetch.append(c) - elif implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - - if parameters and stmt_parameters: - check = set(parameters).intersection( - _column_as_key(k) for k in stmt.parameters - ).difference(check_columns) - if check: - raise exc.CompileError( - "Unconsumed column names: %s" % - (", ".join("%s" % c for c in check)) - ) - - if stmt._has_multi_parameters: - values_0 = values - values = [values] - - values.extend( - [ - ( - c, - self._create_crud_bind_param( - c, row[c.key], - name="%s_%d" % (c.key, i + 1) - ) - if c.key in row else param - ) - for (c, param) in values_0 - ] - for i, row in enumerate(stmt.parameters[1:]) - ) - - return values + return crud._key_getters_for_crud_column(self) def visit_delete(self, delete_stmt, **kw): self.stack.append({'correlate_froms': set([delete_stmt.table]), - "iswrapper": False, - "asfrom_froms": set([delete_stmt.table])}) + "asfrom_froms": set([delete_stmt.table]), + "selectable": delete_stmt}) self.isdelete = True text = "DELETE " if delete_stmt._prefixes: text += self._generate_prefixes(delete_stmt, - delete_stmt._prefixes, **kw) + delete_stmt._prefixes, **kw) text += "FROM " - table_text = delete_stmt.table._compiler_dispatch(self, - asfrom=True, iscrud=True) + table_text = delete_stmt.table._compiler_dispatch( + self, asfrom=True, iscrud=True) if delete_stmt._hints: dialect_hints = dict([ @@ -2231,11 +2047,11 @@ def visit_delete(self, delete_stmt, **kw): ]) if delete_stmt.table in dialect_hints: table_text = self.format_from_hint_text( - table_text, - delete_stmt.table, - dialect_hints[delete_stmt.table], - True - ) + table_text, + delete_stmt.table, + dialect_hints[delete_stmt.table], + True + ) else: dialect_hints = None @@ -2246,15 +2062,16 @@ def visit_delete(self, delete_stmt, **kw): self.returning = delete_stmt._returning if self.returning_precedes_values: text += " " + self.returning_clause( - delete_stmt, delete_stmt._returning) + delete_stmt, delete_stmt._returning) if delete_stmt._whereclause is not None: - text += " WHERE " - text 
+= delete_stmt._whereclause._compiler_dispatch(self) + t = delete_stmt._whereclause._compiler_dispatch(self, **kw) + if t: + text += " WHERE " + t if self.returning and not self.returning_precedes_values: text += " " + self.returning_clause( - delete_stmt, delete_stmt._returning) + delete_stmt, delete_stmt._returning) self.stack.pop(-1) @@ -2265,11 +2082,11 @@ def visit_savepoint(self, savepoint_stmt): def visit_rollback_to_savepoint(self, savepoint_stmt): return "ROLLBACK TO SAVEPOINT %s" % \ - self.preparer.format_savepoint(savepoint_stmt) + self.preparer.format_savepoint(savepoint_stmt) def visit_release_savepoint(self, savepoint_stmt): return "RELEASE SAVEPOINT %s" % \ - self.preparer.format_savepoint(savepoint_stmt) + self.preparer.format_savepoint(savepoint_stmt) class DDLCompiler(Compiled): @@ -2323,11 +2140,11 @@ def visit_create_table(self, create): table = create.element preparer = self.dialect.identifier_preparer - text = "\n" + " ".join(['CREATE'] + \ - table._prefixes + \ - ['TABLE', - preparer.format_table(table), - "("]) + text = "\n" + " ".join(['CREATE'] + + table._prefixes + + ['TABLE', + preparer.format_table(table), + "("]) separator = "\n" # if only one primary key, specify it along with the column @@ -2336,8 +2153,8 @@ def visit_create_table(self, create): column = create_column.element try: processed = self.process(create_column, - first_pk=column.primary_key - and not first_pk) + first_pk=column.primary_key + and not first_pk) if processed is not None: text += separator separator = ", \n" @@ -2346,15 +2163,16 @@ def visit_create_table(self, create): first_pk = True except exc.CompileError as ce: util.raise_from_cause( - exc.CompileError(util.u("(in table '%s', column '%s'): %s") % ( - table.description, - column.name, - ce.args[0] - ))) - - const = self.create_table_constraints(table) + exc.CompileError( + util.u("(in table '%s', column '%s'): %s") % + (table.description, column.name, ce.args[0]) + )) + + const = self.create_table_constraints( + table, _include_foreign_key_constraints= + create.include_foreign_key_constraints) if const: - text += ", \n\t" + const + text += separator + "\t" + const text += "\n)%s\n\n" % self.post_create_table(table) return text @@ -2366,17 +2184,19 @@ def visit_create_column(self, create, first_pk=False): return None text = self.get_column_specification( - column, - first_pk=first_pk - ) - const = " ".join(self.process(constraint) \ - for constraint in column.constraints) + column, + first_pk=first_pk + ) + const = " ".join(self.process(constraint) + for constraint in column.constraints) if const: text += " " + const return text - def create_table_constraints(self, table): + def create_table_constraints( + self, table, + _include_foreign_key_constraints=None): # On some DB order is significant: visit PK first, then the # other constraints (engine.ReflectionTest.testbasic failed on FB2) @@ -2384,20 +2204,28 @@ def create_table_constraints(self, table): if table.primary_key: constraints.append(table.primary_key) + all_fkcs = table.foreign_key_constraints + if _include_foreign_key_constraints is not None: + omit_fkcs = all_fkcs.difference(_include_foreign_key_constraints) + else: + omit_fkcs = set() + constraints.extend([c for c in table._sorted_constraints - if c is not table.primary_key]) - - return ", \n\t".join(p for p in - (self.process(constraint) - for constraint in constraints - if ( - constraint._create_rule is None or - constraint._create_rule(self)) - and ( - not self.dialect.supports_alter or - not getattr(constraint, 
'use_alter', False) - )) if p is not None - ) + if c is not table.primary_key and + c not in omit_fkcs]) + + return ", \n\t".join( + p for p in + (self.process(constraint) + for constraint in constraints + if ( + constraint._create_rule is None or + constraint._create_rule(self)) + and ( + not self.dialect.supports_alter or + not getattr(constraint, 'use_alter', False) + )) if p is not None + ) def visit_drop_table(self, drop): return "\nDROP TABLE " + self.preparer.format_table(drop.element) @@ -2405,15 +2233,13 @@ def visit_drop_table(self, drop): def visit_drop_view(self, drop): return "\nDROP VIEW " + self.preparer.format_table(drop.element) - def _verify_index_table(self, index): if index.table is None: raise exc.CompileError("Index '%s' is not associated " - "with any table." % index.name) - + "with any table." % index.name) def visit_create_index(self, create, include_schema=False, - include_table_schema=True): + include_table_schema=True): index = create.element self._verify_index_table(index) preparer = self.preparer @@ -2421,22 +2247,22 @@ def visit_create_index(self, create, include_schema=False, if index.unique: text += "UNIQUE " text += "INDEX %s ON %s (%s)" \ - % ( - self._prepared_index_name(index, - include_schema=include_schema), - preparer.format_table(index.table, - use_schema=include_table_schema), - ', '.join( - self.sql_compiler.process(expr, - include_table=False, literal_binds=True) for - expr in index.expressions) - ) + % ( + self._prepared_index_name(index, + include_schema=include_schema), + preparer.format_table(index.table, + use_schema=include_table_schema), + ', '.join( + self.sql_compiler.process( + expr, include_table=False, literal_binds=True) for + expr in index.expressions) + ) return text def visit_drop_index(self, drop): index = drop.element - return "\nDROP INDEX " + self._prepared_index_name(index, - include_schema=True) + return "\nDROP INDEX " + self._prepared_index_name( + index, include_schema=True) def _prepared_index_name(self, index, include_schema=False): if include_schema and index.table is not None and index.table.schema: @@ -2448,10 +2274,10 @@ def _prepared_index_name(self, index, include_schema=False): ident = index.name if isinstance(ident, elements._truncated_label): max_ = self.dialect.max_index_name_length or \ - self.dialect.max_identifier_length + self.dialect.max_identifier_length if len(ident) > max_: ident = ident[0:max_ - 8] + \ - "_" + util.md5_hex(ident)[-4:] + "_" + util.md5_hex(ident)[-4:] else: self.dialect.validate_identifier(ident) @@ -2469,27 +2295,48 @@ def visit_add_constraint(self, create): def visit_create_sequence(self, create): text = "CREATE SEQUENCE %s" % \ - self.preparer.format_sequence(create.element) + self.preparer.format_sequence(create.element) if create.element.increment is not None: text += " INCREMENT BY %d" % create.element.increment if create.element.start is not None: text += " START WITH %d" % create.element.start + if create.element.minvalue is not None: + text += " MINVALUE %d" % create.element.minvalue + if create.element.maxvalue is not None: + text += " MAXVALUE %d" % create.element.maxvalue + if create.element.nominvalue is not None: + text += " NO MINVALUE" + if create.element.nomaxvalue is not None: + text += " NO MAXVALUE" + if create.element.cycle is not None: + text += " CYCLE" return text def visit_drop_sequence(self, drop): return "DROP SEQUENCE %s" % \ - self.preparer.format_sequence(drop.element) + self.preparer.format_sequence(drop.element) def visit_drop_constraint(self, drop): + 
constraint = drop.element + if constraint.name is not None: + formatted_name = self.preparer.format_constraint(constraint) + else: + formatted_name = None + + if formatted_name is None: + raise exc.CompileError( + "Can't emit DROP CONSTRAINT for constraint %r; " + "it has no name" % drop.element) return "ALTER TABLE %s DROP CONSTRAINT %s%s" % ( self.preparer.format_table(drop.element.table), - self.preparer.format_constraint(drop.element), + formatted_name, drop.cascade and " CASCADE" or "" ) def get_column_specification(self, column, **kwargs): colspec = self.preparer.format_column(column) + " " + \ - self.dialect.type_compiler.process(column.type) + self.dialect.type_compiler.process( + column.type, type_expression=column) default = self.get_column_default_string(column) if default is not None: colspec += " DEFAULT " + default @@ -2506,26 +2353,29 @@ def get_column_default_string(self, column): if isinstance(column.server_default.arg, util.string_types): return "'%s'" % column.server_default.arg else: - return self.sql_compiler.process(column.server_default.arg) + return self.sql_compiler.process( + column.server_default.arg, literal_binds=True) else: return None def visit_check_constraint(self, constraint): text = "" if constraint.name is not None: - text += "CONSTRAINT %s " % \ - self.preparer.format_constraint(constraint) + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name text += "CHECK (%s)" % self.sql_compiler.process(constraint.sqltext, - include_table=False, - literal_binds=True) + include_table=False, + literal_binds=True) text += self.define_constraint_deferrability(constraint) return text def visit_column_check_constraint(self, constraint): text = "" if constraint.name is not None: - text += "CONSTRAINT %s " % \ - self.preparer.format_constraint(constraint) + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name text += "CHECK (%s)" % constraint.sqltext text += self.define_constraint_deferrability(constraint) return text @@ -2535,11 +2385,12 @@ def visit_primary_key_constraint(self, constraint): return '' text = "" if constraint.name is not None: - text += "CONSTRAINT %s " % \ - self.preparer.format_constraint(constraint) + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name text += "PRIMARY KEY " text += "(%s)" % ', '.join(self.preparer.quote(c.name) - for c in constraint) + for c in constraint) text += self.define_constraint_deferrability(constraint) return text @@ -2547,16 +2398,17 @@ def visit_foreign_key_constraint(self, constraint): preparer = self.dialect.identifier_preparer text = "" if constraint.name is not None: - text += "CONSTRAINT %s " % \ - preparer.format_constraint(constraint) - remote_table = list(constraint._elements.values())[0].column.table + formatted_name = self.preparer.format_constraint(constraint) + if formatted_name is not None: + text += "CONSTRAINT %s " % formatted_name + remote_table = list(constraint.elements)[0].column.table text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % ( ', '.join(preparer.quote(f.parent.name) - for f in constraint._elements.values()), + for f in constraint.elements), self.define_constraint_remote_table( - constraint, remote_table, preparer), + constraint, remote_table, preparer), ', '.join(preparer.quote(f.column.name) - for f in constraint._elements.values()) + for f in 
constraint.elements) ) text += self.define_constraint_match(constraint) text += self.define_constraint_cascades(constraint) @@ -2573,11 +2425,11 @@ def visit_unique_constraint(self, constraint): return '' text = "" if constraint.name is not None: - text += "CONSTRAINT %s " % \ - self.preparer.format_constraint(constraint) + formatted_name = self.preparer.format_constraint(constraint) + text += "CONSTRAINT %s " % formatted_name text += "UNIQUE (%s)" % ( - ', '.join(self.preparer.quote(c.name) - for c in constraint)) + ', '.join(self.preparer.quote(c.name) + for c in constraint)) text += self.define_constraint_deferrability(constraint) return text @@ -2609,59 +2461,59 @@ def define_constraint_match(self, constraint): class GenericTypeCompiler(TypeCompiler): - def visit_FLOAT(self, type_): + def visit_FLOAT(self, type_, **kw): return "FLOAT" - def visit_REAL(self, type_): + def visit_REAL(self, type_, **kw): return "REAL" - def visit_NUMERIC(self, type_): + def visit_NUMERIC(self, type_, **kw): if type_.precision is None: return "NUMERIC" elif type_.scale is None: return "NUMERIC(%(precision)s)" % \ - {'precision': type_.precision} + {'precision': type_.precision} else: return "NUMERIC(%(precision)s, %(scale)s)" % \ - {'precision': type_.precision, - 'scale': type_.scale} + {'precision': type_.precision, + 'scale': type_.scale} - def visit_DECIMAL(self, type_): + def visit_DECIMAL(self, type_, **kw): if type_.precision is None: return "DECIMAL" elif type_.scale is None: return "DECIMAL(%(precision)s)" % \ - {'precision': type_.precision} + {'precision': type_.precision} else: return "DECIMAL(%(precision)s, %(scale)s)" % \ - {'precision': type_.precision, - 'scale': type_.scale} + {'precision': type_.precision, + 'scale': type_.scale} - def visit_INTEGER(self, type_): + def visit_INTEGER(self, type_, **kw): return "INTEGER" - def visit_SMALLINT(self, type_): + def visit_SMALLINT(self, type_, **kw): return "SMALLINT" - def visit_BIGINT(self, type_): + def visit_BIGINT(self, type_, **kw): return "BIGINT" - def visit_TIMESTAMP(self, type_): + def visit_TIMESTAMP(self, type_, **kw): return 'TIMESTAMP' - def visit_DATETIME(self, type_): + def visit_DATETIME(self, type_, **kw): return "DATETIME" - def visit_DATE(self, type_): + def visit_DATE(self, type_, **kw): return "DATE" - def visit_TIME(self, type_): + def visit_TIME(self, type_, **kw): return "TIME" - def visit_CLOB(self, type_): + def visit_CLOB(self, type_, **kw): return "CLOB" - def visit_NCLOB(self, type_): + def visit_NCLOB(self, type_, **kw): return "NCLOB" def _render_string_type(self, type_, name): @@ -2673,94 +2525,95 @@ def _render_string_type(self, type_, name): text += ' COLLATE "%s"' % type_.collation return text - def visit_CHAR(self, type_): + def visit_CHAR(self, type_, **kw): return self._render_string_type(type_, "CHAR") - def visit_NCHAR(self, type_): + def visit_NCHAR(self, type_, **kw): return self._render_string_type(type_, "NCHAR") - def visit_VARCHAR(self, type_): + def visit_VARCHAR(self, type_, **kw): return self._render_string_type(type_, "VARCHAR") - def visit_NVARCHAR(self, type_): + def visit_NVARCHAR(self, type_, **kw): return self._render_string_type(type_, "NVARCHAR") - def visit_TEXT(self, type_): + def visit_TEXT(self, type_, **kw): return self._render_string_type(type_, "TEXT") - def visit_BLOB(self, type_): + def visit_BLOB(self, type_, **kw): return "BLOB" - def visit_BINARY(self, type_): + def visit_BINARY(self, type_, **kw): return "BINARY" + (type_.length and "(%d)" % type_.length or "") - def 
visit_VARBINARY(self, type_): + def visit_VARBINARY(self, type_, **kw): return "VARBINARY" + (type_.length and "(%d)" % type_.length or "") - def visit_BOOLEAN(self, type_): + def visit_BOOLEAN(self, type_, **kw): return "BOOLEAN" - def visit_large_binary(self, type_): - return self.visit_BLOB(type_) + def visit_large_binary(self, type_, **kw): + return self.visit_BLOB(type_, **kw) - def visit_boolean(self, type_): - return self.visit_BOOLEAN(type_) + def visit_boolean(self, type_, **kw): + return self.visit_BOOLEAN(type_, **kw) - def visit_time(self, type_): - return self.visit_TIME(type_) + def visit_time(self, type_, **kw): + return self.visit_TIME(type_, **kw) - def visit_datetime(self, type_): - return self.visit_DATETIME(type_) + def visit_datetime(self, type_, **kw): + return self.visit_DATETIME(type_, **kw) - def visit_date(self, type_): - return self.visit_DATE(type_) + def visit_date(self, type_, **kw): + return self.visit_DATE(type_, **kw) - def visit_big_integer(self, type_): - return self.visit_BIGINT(type_) + def visit_big_integer(self, type_, **kw): + return self.visit_BIGINT(type_, **kw) - def visit_small_integer(self, type_): - return self.visit_SMALLINT(type_) + def visit_small_integer(self, type_, **kw): + return self.visit_SMALLINT(type_, **kw) - def visit_integer(self, type_): - return self.visit_INTEGER(type_) + def visit_integer(self, type_, **kw): + return self.visit_INTEGER(type_, **kw) - def visit_real(self, type_): - return self.visit_REAL(type_) + def visit_real(self, type_, **kw): + return self.visit_REAL(type_, **kw) - def visit_float(self, type_): - return self.visit_FLOAT(type_) + def visit_float(self, type_, **kw): + return self.visit_FLOAT(type_, **kw) - def visit_numeric(self, type_): - return self.visit_NUMERIC(type_) + def visit_numeric(self, type_, **kw): + return self.visit_NUMERIC(type_, **kw) - def visit_string(self, type_): - return self.visit_VARCHAR(type_) + def visit_string(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) - def visit_unicode(self, type_): - return self.visit_VARCHAR(type_) + def visit_unicode(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) - def visit_text(self, type_): - return self.visit_TEXT(type_) + def visit_text(self, type_, **kw): + return self.visit_TEXT(type_, **kw) - def visit_unicode_text(self, type_): - return self.visit_TEXT(type_) + def visit_unicode_text(self, type_, **kw): + return self.visit_TEXT(type_, **kw) - def visit_enum(self, type_): - return self.visit_VARCHAR(type_) + def visit_enum(self, type_, **kw): + return self.visit_VARCHAR(type_, **kw) - def visit_null(self, type_): + def visit_null(self, type_, **kw): raise exc.CompileError("Can't generate DDL for %r; " - "did you forget to specify a " - "type on this Column?" % type_) + "did you forget to specify a " + "type on this Column?" 
% type_) - def visit_type_decorator(self, type_): - return self.process(type_.type_engine(self.dialect)) + def visit_type_decorator(self, type_, **kw): + return self.process(type_.type_engine(self.dialect), **kw) - def visit_user_defined(self, type_): - return type_.get_col_spec() + def visit_user_defined(self, type_, **kw): + return type_.get_col_spec(**kw) class IdentifierPreparer(object): + """Handle quoting and case-folding of identifiers based on options.""" reserved_words = RESERVED_WORDS @@ -2770,7 +2623,7 @@ class IdentifierPreparer(object): illegal_initial_characters = ILLEGAL_INITIAL_CHARACTERS def __init__(self, dialect, initial_quote='"', - final_quote=None, escape_quote='"', omit_schema=False): + final_quote=None, escape_quote='"', omit_schema=False): """Construct a new ``IdentifierPreparer`` object. initial_quote @@ -2819,8 +2672,8 @@ def quote_identifier(self, value): """ return self.initial_quote + \ - self._escape_identifier(value) + \ - self.final_quote + self._escape_identifier(value) + \ + self.final_quote def _requires_quotes(self, value): """Return True if the given identifier requires quoting.""" @@ -2865,7 +2718,8 @@ def quote(self, ident, force=None): def format_sequence(self, sequence, use_schema=True): name = self.quote(sequence.name) - if not self.omit_schema and use_schema and sequence.schema is not None: + if (not self.omit_schema and use_schema and + sequence.schema is not None): name = self.quote_schema(sequence.schema) + "." + name return name @@ -2878,7 +2732,15 @@ def format_alias(self, alias, name=None): def format_savepoint(self, savepoint, name=None): return self.quote(name or savepoint.ident) - def format_constraint(self, constraint): + @util.dependencies("sqlalchemy.sql.naming") + def format_constraint(self, naming, constraint): + if isinstance(constraint.name, elements._defer_name): + name = naming._constraint_name_for_table( + constraint, constraint.table) + if name: + return self.quote(name) + elif isinstance(constraint.name, elements._defer_none_name): + return None return self.quote(constraint.name) def format_table(self, table, use_schema=True, name=None): @@ -2888,7 +2750,7 @@ def format_table(self, table, use_schema=True, name=None): name = table.name result = self.quote(name) if not self.omit_schema and use_schema \ - and getattr(table, "schema", None): + and getattr(table, "schema", None): result = self.quote_schema(table.schema) + "." + result return result @@ -2898,7 +2760,7 @@ def format_schema(self, name, quote=None): return self.quote(name, quote) def format_column(self, column, use_table=False, - name=None, table_name=None): + name=None, table_name=None): """Prepare a quoted column name.""" if name is None: @@ -2906,8 +2768,8 @@ def format_column(self, column, use_table=False, if not getattr(column, 'is_literal', False): if use_table: return self.format_table( - column.table, use_schema=False, - name=table_name) + "." + self.quote(name) + column.table, use_schema=False, + name=table_name) + "." + self.quote(name) else: return self.quote(name) else: @@ -2915,8 +2777,9 @@ def format_column(self, column, use_table=False, # which shouldn't get quoted if use_table: - return self.format_table(column.table, - use_schema=False, name=table_name) + '.' + name + return self.format_table( + column.table, use_schema=False, + name=table_name) + '.' 
+ name else: return name @@ -2937,9 +2800,9 @@ def format_table_seq(self, table, use_schema=True): @util.memoized_property def _r_identifiers(self): initial, final, escaped_final = \ - [re.escape(s) for s in - (self.initial_quote, self.final_quote, - self._escape_identifier(self.final_quote))] + [re.escape(s) for s in + (self.initial_quote, self.final_quote, + self._escape_identifier(self.final_quote))] r = re.compile( r'(?:' r'(?:%(initial)s((?:%(escaped)s|[^%(final)s])+)%(final)s' diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py new file mode 100644 index 0000000000..ec5e0c5bee --- /dev/null +++ b/lib/sqlalchemy/sql/crud.py @@ -0,0 +1,571 @@ +# sql/crud.py +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Functions used by compiler.py to determine the parameters rendered +within INSERT and UPDATE statements. + +""" +from .. import util +from .. import exc +from . import elements +import operator + +REQUIRED = util.symbol('REQUIRED', """ +Placeholder for the value within a :class:`.BindParameter` +which is required to be present when the statement is passed +to :meth:`.Connection.execute`. + +This symbol is typically used when a :func:`.expression.insert` +or :func:`.expression.update` statement is compiled without parameter +values present. + +""") + + +def _get_crud_params(compiler, stmt, **kw): + """create a set of tuples representing column/string pairs for use + in an INSERT or UPDATE statement. + + Also generates the Compiled object's postfetch, prefetch, and + returning column collections, used for default handling and ultimately + populating the ResultProxy's prefetch_cols() and postfetch_cols() + collections. 
+ + """ + + compiler.postfetch = [] + compiler.prefetch = [] + compiler.returning = [] + + # no parameters in the statement, no parameters in the + # compiled params - return binds for all columns + if compiler.column_keys is None and stmt.parameters is None: + return [ + (c, _create_bind_param( + compiler, c, None, required=True)) + for c in stmt.table.columns + ] + + if stmt._has_multi_parameters: + stmt_parameters = stmt.parameters[0] + else: + stmt_parameters = stmt.parameters + + # getters - these are normally just column.key, + # but in the case of mysql multi-table update, the rules for + # .key must conditionally take tablename into account + _column_as_key, _getattr_col_key, _col_bind_name = \ + _key_getters_for_crud_column(compiler) + + # if we have statement parameters - set defaults in the + # compiled params + if compiler.column_keys is None: + parameters = {} + else: + parameters = dict((_column_as_key(key), REQUIRED) + for key in compiler.column_keys + if not stmt_parameters or + key not in stmt_parameters) + + # create a list of column assignment clauses as tuples + values = [] + + if stmt_parameters is not None: + _get_stmt_parameters_params( + compiler, + parameters, stmt_parameters, _column_as_key, values, kw) + + check_columns = {} + + # special logic that only occurs for multi-table UPDATE + # statements + if compiler.isupdate and stmt._extra_froms and stmt_parameters: + _get_multitable_params( + compiler, stmt, stmt_parameters, check_columns, + _col_bind_name, _getattr_col_key, values, kw) + + if compiler.isinsert and stmt.select_names: + _scan_insert_from_select_cols( + compiler, stmt, parameters, + _getattr_col_key, _column_as_key, + _col_bind_name, check_columns, values, kw) + else: + _scan_cols( + compiler, stmt, parameters, + _getattr_col_key, _column_as_key, + _col_bind_name, check_columns, values, kw) + + if parameters and stmt_parameters: + check = set(parameters).intersection( + _column_as_key(k) for k in stmt_parameters + ).difference(check_columns) + if check: + raise exc.CompileError( + "Unconsumed column names: %s" % + (", ".join("%s" % c for c in check)) + ) + + if stmt._has_multi_parameters: + values = _extend_values_for_multiparams(compiler, stmt, values, kw) + + return values + + +def _create_bind_param( + compiler, col, value, process=True, + required=False, name=None, **kw): + if name is None: + name = col.key + bindparam = elements.BindParameter( + name, value, type_=col.type, required=required) + bindparam._is_crud = True + if process: + bindparam = bindparam._compiler_dispatch(compiler, **kw) + return bindparam + + +def _key_getters_for_crud_column(compiler): + if compiler.isupdate and compiler.statement._extra_froms: + # when extra tables are present, refer to the columns + # in those extra tables as table-qualified, including in + # dictionaries and when rendering bind param names. + # the "main" table of the statement remains unqualified, + # allowing the most compatibility with a non-multi-table + # statement. 
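For context, the kind of statement that exercises this table-qualified naming is a multi-table UPDATE, which only some backends accept. A minimal sketch, illustrative only and not part of this module, assuming the public Table/update() API and the bundled MySQL dialect::

    from sqlalchemy import MetaData, Table, Column, Integer, String
    from sqlalchemy.dialects import mysql

    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(50)))
    addresses = Table('addresses', metadata,
                      Column('id', Integer, primary_key=True),
                      Column('user_id', Integer),
                      Column('email', String(50)))

    # referencing addresses in both SET and WHERE makes this a
    # multi-table UPDATE; columns of the extra table are keyed and
    # bound in table-qualified form, e.g. bind name "addresses_email"
    stmt = users.update().\
        values({users.c.name: 'ed', addresses.c.email: 'ed@example.com'}).\
        where(users.c.id == addresses.c.user_id)

    print(stmt.compile(dialect=mysql.dialect()))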
+ _et = set(compiler.statement._extra_froms) + + def _column_as_key(key): + str_key = elements._column_as_key(key) + if hasattr(key, 'table') and key.table in _et: + return (key.table.name, str_key) + else: + return str_key + + def _getattr_col_key(col): + if col.table in _et: + return (col.table.name, col.key) + else: + return col.key + + def _col_bind_name(col): + if col.table in _et: + return "%s_%s" % (col.table.name, col.key) + else: + return col.key + + else: + _column_as_key = elements._column_as_key + _getattr_col_key = _col_bind_name = operator.attrgetter("key") + + return _column_as_key, _getattr_col_key, _col_bind_name + + +def _scan_insert_from_select_cols( + compiler, stmt, parameters, _getattr_col_key, + _column_as_key, _col_bind_name, check_columns, values, kw): + + need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid = \ + _get_returning_modifiers(compiler, stmt) + + cols = [stmt.table.c[_column_as_key(name)] + for name in stmt.select_names] + + compiler._insert_from_select = stmt.select + + add_select_cols = [] + if stmt.include_insert_from_select_defaults: + col_set = set(cols) + for col in stmt.table.columns: + if col not in col_set and col.default: + cols.append(col) + + for c in cols: + col_key = _getattr_col_key(c) + if col_key in parameters and col_key not in check_columns: + parameters.pop(col_key) + values.append((c, None)) + else: + _append_param_insert_select_hasdefault( + compiler, stmt, c, add_select_cols, kw) + + if add_select_cols: + values.extend(add_select_cols) + compiler._insert_from_select = compiler._insert_from_select._generate() + compiler._insert_from_select._raw_columns = \ + tuple(compiler._insert_from_select._raw_columns) + tuple( + expr for col, expr in add_select_cols) + + +def _scan_cols( + compiler, stmt, parameters, _getattr_col_key, + _column_as_key, _col_bind_name, check_columns, values, kw): + + need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid = \ + _get_returning_modifiers(compiler, stmt) + + if stmt._parameter_ordering: + parameter_ordering = [ + _column_as_key(key) for key in stmt._parameter_ordering + ] + ordered_keys = set(parameter_ordering) + cols = [ + stmt.table.c[key] for key in parameter_ordering + ] + [ + c for c in stmt.table.c if c.key not in ordered_keys + ] + else: + cols = stmt.table.columns + + for c in cols: + col_key = _getattr_col_key(c) + if col_key in parameters and col_key not in check_columns: + + _append_param_parameter( + compiler, stmt, c, col_key, parameters, _col_bind_name, + implicit_returning, implicit_return_defaults, values, kw) + + elif compiler.isinsert: + if c.primary_key and \ + need_pks and \ + ( + implicit_returning or + not postfetch_lastrowid or + c is not stmt.table._autoincrement_column + ): + + if implicit_returning: + _append_param_insert_pk_returning( + compiler, stmt, c, values, kw) + else: + _append_param_insert_pk(compiler, stmt, c, values, kw) + + elif c.default is not None: + + _append_param_insert_hasdefault( + compiler, stmt, c, implicit_return_defaults, + values, kw) + + elif c.server_default is not None: + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + compiler.postfetch.append(c) + elif implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + + elif compiler.isupdate: + _append_param_update( + compiler, stmt, c, implicit_return_defaults, values, kw) + + +def _append_param_parameter( + compiler, stmt, c, 
col_key, parameters, _col_bind_name, + implicit_returning, implicit_return_defaults, values, kw): + value = parameters.pop(col_key) + if elements._is_literal(value): + value = _create_bind_param( + compiler, c, value, required=value is REQUIRED, + name=_col_bind_name(c) + if not stmt._has_multi_parameters + else "%s_0" % _col_bind_name(c), + **kw + ) + else: + if isinstance(value, elements.BindParameter) and \ + value.type._isnull: + value = value._clone() + value.type = c.type + + if c.primary_key and implicit_returning: + compiler.returning.append(c) + value = compiler.process(value.self_group(), **kw) + elif implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + value = compiler.process(value.self_group(), **kw) + else: + compiler.postfetch.append(c) + value = compiler.process(value.self_group(), **kw) + values.append((c, value)) + + +def _append_param_insert_pk_returning(compiler, stmt, c, values, kw): + if c.default is not None: + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = compiler.process(c.default, **kw) + values.append((c, proc)) + compiler.returning.append(c) + elif c.default.is_clause_element: + values.append( + (c, compiler.process( + c.default.arg.self_group(), **kw)) + ) + compiler.returning.append(c) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + + else: + compiler.returning.append(c) + + +def _create_prefetch_bind_param(compiler, c, process=True, name=None): + param = _create_bind_param(compiler, c, None, process=process, name=name) + compiler.prefetch.append(c) + return param + + +class _multiparam_column(elements.ColumnElement): + def __init__(self, original, index): + self.key = "%s_%d" % (original.key, index + 1) + self.original = original + self.default = original.default + self.type = original.type + + def __eq__(self, other): + return isinstance(other, _multiparam_column) and \ + other.key == self.key and \ + other.original == self.original + + +def _process_multiparam_default_bind(compiler, c, index, kw): + + if not c.default: + raise exc.CompileError( + "INSERT value for column %s is explicitly rendered as a bound" + "parameter in the VALUES clause; " + "a Python-side value or SQL expression is required" % c) + elif c.default.is_clause_element: + return compiler.process(c.default.arg.self_group(), **kw) + else: + col = _multiparam_column(c, index) + return _create_prefetch_bind_param(compiler, col) + + +def _append_param_insert_pk(compiler, stmt, c, values, kw): + if ( + (c.default is not None and + (not c.default.is_sequence or + compiler.dialect.supports_sequences)) or + c is stmt.table._autoincrement_column and + (compiler.dialect.supports_sequences or + compiler.dialect. 
+ preexecute_autoincrement_sequences) + ): + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + + +def _append_param_insert_hasdefault( + compiler, stmt, c, implicit_return_defaults, values, kw): + + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = compiler.process(c.default, **kw) + values.append((c, proc)) + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + compiler.postfetch.append(c) + elif c.default.is_clause_element: + proc = compiler.process(c.default.arg.self_group(), **kw) + values.append((c, proc)) + + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + # don't add primary key column to postfetch + compiler.postfetch.append(c) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + + +def _append_param_insert_select_hasdefault( + compiler, stmt, c, values, kw): + + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = c.default + values.append((c, proc)) + elif c.default.is_clause_element: + proc = c.default.arg.self_group() + values.append((c, proc)) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c, process=False)) + ) + + +def _append_param_update( + compiler, stmt, c, implicit_return_defaults, values, kw): + + if c.onupdate is not None and not c.onupdate.is_sequence: + if c.onupdate.is_clause_element: + values.append( + (c, compiler.process( + c.onupdate.arg.self_group(), **kw)) + ) + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + else: + compiler.postfetch.append(c) + else: + values.append( + (c, _create_prefetch_bind_param(compiler, c)) + ) + elif c.server_onupdate is not None: + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + else: + compiler.postfetch.append(c) + elif implicit_return_defaults and \ + stmt._return_defaults is not True and \ + c in implicit_return_defaults: + compiler.returning.append(c) + + +def _get_multitable_params( + compiler, stmt, stmt_parameters, check_columns, + _col_bind_name, _getattr_col_key, values, kw): + + normalized_params = dict( + (elements._clause_element_as_expr(c), param) + for c, param in stmt_parameters.items() + ) + affected_tables = set() + for t in stmt._extra_froms: + for c in t.c: + if c in normalized_params: + affected_tables.add(t) + check_columns[_getattr_col_key(c)] = c + value = normalized_params[c] + if elements._is_literal(value): + value = _create_bind_param( + compiler, c, value, required=value is REQUIRED, + name=_col_bind_name(c)) + else: + compiler.postfetch.append(c) + value = compiler.process(value.self_group(), **kw) + values.append((c, value)) + # determine tables which are actually to be updated - process onupdate + # and server_onupdate for these + for t in affected_tables: + for c in t.c: + if c in normalized_params: + continue + elif (c.onupdate is not None and not + c.onupdate.is_sequence): + if c.onupdate.is_clause_element: + values.append( + (c, compiler.process( + c.onupdate.arg.self_group(), + **kw) + ) + ) + compiler.postfetch.append(c) + else: + values.append( + (c, _create_prefetch_bind_param( + compiler, c, name=_col_bind_name(c))) + ) + elif c.server_onupdate is not None: + 
compiler.postfetch.append(c) + + +def _extend_values_for_multiparams(compiler, stmt, values, kw): + values_0 = values + values = [values] + + values.extend( + [ + ( + c, + (_create_bind_param( + compiler, c, row[c.key], + name="%s_%d" % (c.key, i + 1) + ) if elements._is_literal(row[c.key]) + else compiler.process( + row[c.key].self_group(), **kw)) + if c.key in row else + _process_multiparam_default_bind(compiler, c, i, kw) + ) + for (c, param) in values_0 + ] + for i, row in enumerate(stmt.parameters[1:]) + ) + return values + + +def _get_stmt_parameters_params( + compiler, parameters, stmt_parameters, _column_as_key, values, kw): + for k, v in stmt_parameters.items(): + colkey = _column_as_key(k) + if colkey is not None: + parameters.setdefault(colkey, v) + else: + # a non-Column expression on the left side; + # add it to values() in an "as-is" state, + # coercing right side to bound param + if elements._is_literal(v): + v = compiler.process( + elements.BindParameter(None, v, type_=k.type), + **kw) + else: + v = compiler.process(v.self_group(), **kw) + + values.append((k, v)) + + +def _get_returning_modifiers(compiler, stmt): + need_pks = compiler.isinsert and \ + not compiler.inline and \ + not stmt._returning and \ + not stmt._has_multi_parameters + + implicit_returning = need_pks and \ + compiler.dialect.implicit_returning and \ + stmt.table.implicit_returning + + if compiler.isinsert: + implicit_return_defaults = (implicit_returning and + stmt._return_defaults) + elif compiler.isupdate: + implicit_return_defaults = (compiler.dialect.implicit_returning and + stmt.table.implicit_returning and + stmt._return_defaults) + else: + implicit_return_defaults = False + + if implicit_return_defaults: + if stmt._return_defaults is True: + implicit_return_defaults = set(stmt.table.c) + else: + implicit_return_defaults = set(stmt._return_defaults) + + postfetch_lastrowid = need_pks and compiler.dialect.postfetch_lastrowid + + return need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index bda8765021..1cb9eeb70d 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -1,5 +1,6 @@ # sql/ddl.py -# Copyright (C) 2009-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2009-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -11,12 +12,12 @@ from .. import util from .elements import ClauseElement -from .visitors import traverse from .base import Executable, _generative, SchemaVisitor, _bind_or_error from ..util import topological from .. import event from .. 
import exc + class _DDLCompiles(ClauseElement): def _compiler(self, dialect, **kw): """Return a compiler appropriate for this ClauseElement, given a @@ -56,7 +57,7 @@ class DDLElement(Executable, _DDLCompiles): """ _execution_options = Executable.\ - _execution_options.union({'autocommit': True}) + _execution_options.union({'autocommit': True}) target = None on = None @@ -95,10 +96,10 @@ def execute(self, bind=None, target=None): return bind.execute(self.against(target)) else: bind.engine.logger.info( - "DDL execution skipped, criteria not met.") + "DDL execution skipped, criteria not met.") @util.deprecated("0.7", "See :class:`.DDLEvents`, as well as " - ":meth:`.DDLElement.execute_if`.") + ":meth:`.DDLElement.execute_if`.") def execute_at(self, event_name, target): """Link execution of this DDL to the DDL lifecycle of a SchemaItem. @@ -129,7 +130,7 @@ def execute_at(self, event_name, target): def call_event(target, connection, **kw): if self._should_execute_deprecated(event_name, - target, connection, **kw): + target, connection, **kw): return connection.execute(self.against(target)) event.listen(target, "" + event_name.replace('-', '_'), call_event) @@ -211,7 +212,7 @@ def execute_if(self, dialect=None, callable_=None, state=None): def _should_execute(self, target, bind, **kw): if self.on is not None and \ - not self._should_execute_deprecated(None, target, bind, **kw): + not self._should_execute_deprecated(None, target, bind, **kw): return False if isinstance(self.dialect, util.string_types): @@ -220,8 +221,9 @@ def _should_execute(self, target, bind, **kw): elif isinstance(self.dialect, (tuple, list, set)): if bind.engine.name not in self.dialect: return False - if self.callable_ is not None and \ - not self.callable_(self, target, bind, state=self.state, **kw): + if (self.callable_ is not None and + not self.callable_(self, target, bind, + state=self.state, **kw)): return False return True @@ -245,7 +247,7 @@ def __call__(self, target, bind, **kw): def _check_ddl_on(self, on): if (on is not None and (not isinstance(on, util.string_types + (tuple, list, set)) and - not util.callable(on))): + not util.callable(on))): raise exc.ArgumentError( "Expected the name of a database dialect, a tuple " "of names, or a callable for " @@ -295,7 +297,7 @@ class DDL(DDLElement): %(fullname)s - the Table name including schema, quoted if needed The DDL's "context", if any, will be combined with the standard - substutions noted above. Keys present in the context will override + substitutions noted above. Keys present in the context will override the standard substitutions. """ @@ -367,7 +369,7 @@ def __init__(self, statement, on=None, context=None, bind=None): :class:`.DDLEvents` - :mod:`sqlalchemy.event` + :ref:`event_toplevel` """ @@ -392,9 +394,8 @@ def __repr__(self): if getattr(self, key)])) - class _CreateDropBase(DDLElement): - """Base class for DDL constucts that represent CREATE and DROP or + """Base class for DDL constructs that represent CREATE and DROP or equivalents. The common theme of _CreateDropBase is a single @@ -462,19 +463,28 @@ class CreateTable(_CreateDropBase): __visit_name__ = "create_table" - def __init__(self, element, on=None, bind=None): + def __init__( + self, element, on=None, bind=None, + include_foreign_key_constraints=None): """Create a :class:`.CreateTable` construct. :param element: a :class:`.Table` that's the subject of the CREATE :param on: See the description for 'on' in :class:`.DDL`. :param bind: See the description for 'bind' in :class:`.DDL`. 
+ :param include_foreign_key_constraints: optional sequence of + :class:`.ForeignKeyConstraint` objects that will be included + inline within the CREATE construct; if omitted, all foreign key + constraints that do not specify use_alter=True are included. + + .. versionadded:: 1.0.0 """ super(CreateTable, self).__init__(element, on=on, bind=bind) self.columns = [CreateColumn(column) - for column in element.columns - ] + for column in element.columns + ] + self.include_foreign_key_constraints = include_foreign_key_constraints class _DropView(_CreateDropBase): @@ -560,9 +570,10 @@ def compile(element, compiler, **kw): as an implicitly-present "system" column. For example, suppose we wish to produce a :class:`.Table` which skips - rendering of the Postgresql ``xmin`` column against the Postgresql backend, - but on other backends does render it, in anticipation of a triggered rule. - A conditional compilation rule could skip this name only on Postgresql:: + rendering of the Postgresql ``xmin`` column against the Postgresql + backend, but on other backends does render it, in anticipation of a + triggered rule. A conditional compilation rule could skip this name only + on Postgresql:: from sqlalchemy.schema import CreateColumn @@ -584,7 +595,8 @@ def skip_xmin(element, compiler, **kw): will be omitted, but only against the Postgresql backend. .. versionadded:: 0.8.3 The :class:`.CreateColumn` construct supports - skipping of columns by returning ``None`` from a custom compilation rule. + skipping of columns by returning ``None`` from a custom compilation + rule. .. versionadded:: 0.8 The :class:`.CreateColumn` construct was added to support custom column creation styles. @@ -634,7 +646,7 @@ class AddConstraint(_CreateDropBase): def __init__(self, element, *args, **kw): super(AddConstraint, self).__init__(element, *args, **kw) element._create_rule = util.portable_instancemethod( - self._create_rule_disable) + self._create_rule_disable) class DropConstraint(_CreateDropBase): @@ -646,7 +658,7 @@ def __init__(self, element, cascade=False, **kw): self.cascade = cascade super(DropConstraint, self).__init__(element, **kw) element._create_rule = util.portable_instancemethod( - self._create_rule_disable) + self._create_rule_disable) class DDLBase(SchemaVisitor): @@ -670,21 +682,21 @@ def _can_create_table(self, table): if table.schema: self.dialect.validate_identifier(table.schema) return not self.checkfirst or \ - not self.dialect.has_table(self.connection, - table.name, schema=table.schema) + not self.dialect.has_table(self.connection, + table.name, schema=table.schema) def _can_create_sequence(self, sequence): return self.dialect.supports_sequences and \ ( (not self.dialect.sequences_optional or not sequence.optional) and - ( - not self.checkfirst or - not self.dialect.has_sequence( - self.connection, - sequence.name, - schema=sequence.schema) - ) + ( + not self.checkfirst or + not self.dialect.has_sequence( + self.connection, + sequence.name, + schema=sequence.schema) + ) ) def visit_metadata(self, metadata): @@ -692,48 +704,80 @@ def visit_metadata(self, metadata): tables = self.tables else: tables = list(metadata.tables.values()) - collection = [t for t in sort_tables(tables) - if self._can_create_table(t)] + + collection = sort_tables_and_constraints( + [t for t in tables if self._can_create_table(t)]) + seq_coll = [s for s in metadata._sequences.values() - if s.column is None and self._can_create_sequence(s)] + if s.column is None and self._can_create_sequence(s)] + event_collection = [ + t for 
(t, fks) in collection if t is not None + ] metadata.dispatch.before_create(metadata, self.connection, - tables=collection, - checkfirst=self.checkfirst, - _ddl_runner=self) + tables=event_collection, + checkfirst=self.checkfirst, + _ddl_runner=self) for seq in seq_coll: self.traverse_single(seq, create_ok=True) - for table in collection: - self.traverse_single(table, create_ok=True) + for table, fkcs in collection: + if table is not None: + self.traverse_single( + table, create_ok=True, + include_foreign_key_constraints=fkcs, + _is_metadata_operation=True) + else: + for fkc in fkcs: + self.traverse_single(fkc) metadata.dispatch.after_create(metadata, self.connection, - tables=collection, - checkfirst=self.checkfirst, - _ddl_runner=self) - - def visit_table(self, table, create_ok=False): + tables=event_collection, + checkfirst=self.checkfirst, + _ddl_runner=self) + + def visit_table( + self, table, create_ok=False, + include_foreign_key_constraints=None, + _is_metadata_operation=False): if not create_ok and not self._can_create_table(table): return - table.dispatch.before_create(table, self.connection, - checkfirst=self.checkfirst, - _ddl_runner=self) + table.dispatch.before_create( + table, self.connection, + checkfirst=self.checkfirst, + _ddl_runner=self, + _is_metadata_operation=_is_metadata_operation) for column in table.columns: if column.default is not None: self.traverse_single(column.default) - self.connection.execute(CreateTable(table)) + if not self.dialect.supports_alter: + # e.g., don't omit any foreign key constraints + include_foreign_key_constraints = None + + self.connection.execute( + CreateTable( + table, + include_foreign_key_constraints=include_foreign_key_constraints + )) if hasattr(table, 'indexes'): for index in table.indexes: self.traverse_single(index) - table.dispatch.after_create(table, self.connection, - checkfirst=self.checkfirst, - _ddl_runner=self) + table.dispatch.after_create( + table, self.connection, + checkfirst=self.checkfirst, + _ddl_runner=self, + _is_metadata_operation=_is_metadata_operation) + + def visit_foreign_key_constraint(self, constraint): + if not self.dialect.supports_alter: + return + self.connection.execute(AddConstraint(constraint)) def visit_sequence(self, sequence, create_ok=False): if not create_ok and not self._can_create_sequence(sequence): @@ -761,11 +805,51 @@ def visit_metadata(self, metadata): else: tables = list(metadata.tables.values()) - collection = [ - t - for t in reversed(sort_tables(tables)) - if self._can_drop_table(t) - ] + try: + unsorted_tables = [t for t in tables if self._can_drop_table(t)] + collection = list(reversed( + sort_tables_and_constraints( + unsorted_tables, + filter_fn=lambda constraint: False + if not self.dialect.supports_alter + or constraint.name is None + else None + ) + )) + except exc.CircularDependencyError as err2: + if not self.dialect.supports_alter: + util.warn( + "Can't sort tables for DROP; an " + "unresolvable foreign key " + "dependency exists between tables: %s, and backend does " + "not support ALTER. To restore at least a partial sort, " + "apply use_alter=True to ForeignKey and " + "ForeignKeyConstraint " + "objects involved in the cycle to mark these as known " + "cycles that will be ignored." 
+ % ( + ", ".join(sorted([t.fullname for t in err2.cycles])) + ) + ) + collection = [(t, ()) for t in unsorted_tables] + else: + util.raise_from_cause( + exc.CircularDependencyError( + err2.args[0], + err2.cycles, err2.edges, + msg="Can't sort tables for DROP; an " + "unresolvable foreign key " + "dependency exists between tables: %s. Please ensure " + "that the ForeignKey and ForeignKeyConstraint objects " + "involved in the cycle have " + "names so that they can be dropped using " + "DROP CONSTRAINT." + % ( + ", ".join(sorted([t.fullname for t in err2.cycles])) + ) + + ) + ) seq_coll = [ s @@ -773,48 +857,59 @@ def visit_metadata(self, metadata): if s.column is None and self._can_drop_sequence(s) ] + event_collection = [ + t for (t, fks) in collection if t is not None + ] + metadata.dispatch.before_drop( - metadata, self.connection, tables=collection, + metadata, self.connection, tables=event_collection, checkfirst=self.checkfirst, _ddl_runner=self) - for table in collection: - self.traverse_single(table, drop_ok=True) + for table, fkcs in collection: + if table is not None: + self.traverse_single( + table, drop_ok=True, _is_metadata_operation=True) + else: + for fkc in fkcs: + self.traverse_single(fkc) for seq in seq_coll: self.traverse_single(seq, drop_ok=True) metadata.dispatch.after_drop( - metadata, self.connection, tables=collection, + metadata, self.connection, tables=event_collection, checkfirst=self.checkfirst, _ddl_runner=self) def _can_drop_table(self, table): self.dialect.validate_identifier(table.name) if table.schema: self.dialect.validate_identifier(table.schema) - return not self.checkfirst or self.dialect.has_table(self.connection, - table.name, schema=table.schema) + return not self.checkfirst or self.dialect.has_table( + self.connection, table.name, schema=table.schema) def _can_drop_sequence(self, sequence): return self.dialect.supports_sequences and \ ((not self.dialect.sequences_optional or - not sequence.optional) and + not sequence.optional) and (not self.checkfirst or - self.dialect.has_sequence( - self.connection, - sequence.name, - schema=sequence.schema)) - ) + self.dialect.has_sequence( + self.connection, + sequence.name, + schema=sequence.schema)) + ) def visit_index(self, index): self.connection.execute(DropIndex(index)) - def visit_table(self, table, drop_ok=False): + def visit_table(self, table, drop_ok=False, _is_metadata_operation=False): if not drop_ok and not self._can_drop_table(table): return - table.dispatch.before_drop(table, self.connection, - checkfirst=self.checkfirst, - _ddl_runner=self) + table.dispatch.before_drop( + table, self.connection, + checkfirst=self.checkfirst, + _ddl_runner=self, + _is_metadata_operation=_is_metadata_operation) for column in table.columns: if column.default is not None: @@ -822,43 +917,179 @@ def visit_table(self, table, drop_ok=False): self.connection.execute(DropTable(table)) - table.dispatch.after_drop(table, self.connection, - checkfirst=self.checkfirst, - _ddl_runner=self) + table.dispatch.after_drop( + table, self.connection, + checkfirst=self.checkfirst, + _ddl_runner=self, + _is_metadata_operation=_is_metadata_operation) + + def visit_foreign_key_constraint(self, constraint): + if not self.dialect.supports_alter: + return + self.connection.execute(DropConstraint(constraint)) def visit_sequence(self, sequence, drop_ok=False): if not drop_ok and not self._can_drop_sequence(sequence): return self.connection.execute(DropSequence(sequence)) + def sort_tables(tables, skip_fn=None, extra_dependencies=None): - 
"""sort a collection of Table objects in order of - their foreign-key dependency.""" + """sort a collection of :class:`.Table` objects based on dependency. - tables = list(tables) - tuples = [] - if extra_dependencies is not None: - tuples.extend(extra_dependencies) + This is a dependency-ordered sort which will emit :class:`.Table` + objects such that they will follow their dependent :class:`.Table` objects. + Tables are dependent on another based on the presence of + :class:`.ForeignKeyConstraint` objects as well as explicit dependencies + added by :meth:`.Table.add_is_dependent_on`. + + .. warning:: + + The :func:`.sort_tables` function cannot by itself accommodate + automatic resolution of dependency cycles between tables, which + are usually caused by mutually dependent foreign key constraints. + To resolve these cycles, either the + :paramref:`.ForeignKeyConstraint.use_alter` parameter may be appled + to those constraints, or use the + :func:`.sql.sort_tables_and_constraints` function which will break + out foreign key constraints involved in cycles separately. + + :param tables: a sequence of :class:`.Table` objects. + + :param skip_fn: optional callable which will be passed a + :class:`.ForeignKey` object; if it returns True, this + constraint will not be considered as a dependency. Note this is + **different** from the same parameter in + :func:`.sort_tables_and_constraints`, which is + instead passed the owning :class:`.ForeignKeyConstraint` object. + + :param extra_dependencies: a sequence of 2-tuples of tables which will + also be considered as dependent on each other. + + .. seealso:: + + :func:`.sort_tables_and_constraints` + + :meth:`.MetaData.sorted_tables` - uses this function to sort + + + """ + + if skip_fn is not None: + def _skip_fn(fkc): + for fk in fkc.elements: + if skip_fn(fk): + return True + else: + return None + else: + _skip_fn = None + + return [ + t for (t, fkcs) in + sort_tables_and_constraints( + tables, filter_fn=_skip_fn, extra_dependencies=extra_dependencies) + if t is not None + ] + + +def sort_tables_and_constraints( + tables, filter_fn=None, extra_dependencies=None): + """sort a collection of :class:`.Table` / :class:`.ForeignKeyConstraint` + objects. + + This is a dependency-ordered sort which will emit tuples of + ``(Table, [ForeignKeyConstraint, ...])`` such that each + :class:`.Table` follows its dependent :class:`.Table` objects. + Remaining :class:`.ForeignKeyConstraint` objects that are separate due to + dependency rules not satisifed by the sort are emitted afterwards + as ``(None, [ForeignKeyConstraint ...])``. + + Tables are dependent on another based on the presence of + :class:`.ForeignKeyConstraint` objects, explicit dependencies + added by :meth:`.Table.add_is_dependent_on`, as well as dependencies + stated here using the :paramref:`~.sort_tables_and_constraints.skip_fn` + and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies` + parameters. + + :param tables: a sequence of :class:`.Table` objects. + + :param filter_fn: optional callable which will be passed a + :class:`.ForeignKeyConstraint` object, and returns a value based on + whether this constraint should definitely be included or excluded as + an inline constraint, or neither. If it returns False, the constraint + will definitely be included as a dependency that cannot be subject + to ALTER; if True, it will **only** be included as an ALTER result at + the end. 
Returning None means the constraint is included in the + table-based result unless it is detected as part of a dependency cycle. + + :param extra_dependencies: a sequence of 2-tuples of tables which will + also be considered as dependent on each other. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :func:`.sort_tables` - def visit_foreign_key(fkey): - if fkey.use_alter: - return - elif skip_fn and skip_fn(fkey): - return - parent_table = fkey.column.table - if parent_table in tables: - child_table = fkey.parent.table - if parent_table is not child_table: - tuples.append((parent_table, child_table)) + """ + + fixed_dependencies = set() + mutable_dependencies = set() + + if extra_dependencies is not None: + fixed_dependencies.update(extra_dependencies) + + remaining_fkcs = set() for table in tables: - traverse(table, - {'schema_visitor': True}, - {'foreign_key': visit_foreign_key}) + for fkc in table.foreign_key_constraints: + if fkc.use_alter is True: + remaining_fkcs.add(fkc) + continue + + if filter_fn: + filtered = filter_fn(fkc) + + if filtered is True: + remaining_fkcs.add(fkc) + continue + + dependent_on = fkc.referred_table + if dependent_on is not table: + mutable_dependencies.add((dependent_on, table)) - tuples.extend( - [parent, table] for parent in table._extra_dependencies + fixed_dependencies.update( + (parent, table) for parent in table._extra_dependencies ) - return list(topological.sort(tuples, tables)) + try: + candidate_sort = list( + topological.sort( + fixed_dependencies.union(mutable_dependencies), tables, + deterministic_order=True + ) + ) + except exc.CircularDependencyError as err: + for edge in err.edges: + if edge in mutable_dependencies: + table = edge[1] + can_remove = [ + fkc for fkc in table.foreign_key_constraints + if filter_fn is None or filter_fn(fkc) is not False] + remaining_fkcs.update(can_remove) + for fkc in can_remove: + dependent_on = fkc.referred_table + if dependent_on is not table: + mutable_dependencies.discard((dependent_on, table)) + candidate_sort = list( + topological.sort( + fixed_dependencies.union(mutable_dependencies), tables, + deterministic_order=True + ) + ) + return [ + (table, table.foreign_key_constraints.difference(remaining_fkcs)) + for table in candidate_sort + ] + [(None, list(remaining_fkcs))] diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py index 6d595450be..d180dbc02e 100644 --- a/lib/sqlalchemy/sql/default_comparator.py +++ b/lib/sqlalchemy/sql/default_comparator.py @@ -1,5 +1,6 @@ # sql/default_comparator.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,275 +9,280 @@ """ from .. import exc, util -from . import operators from . import type_api +from . import operators from .elements import BindParameter, True_, False_, BinaryExpression, \ - Null, _const_expr, _clause_element_as_expr, \ - ClauseList, ColumnElement, TextClause, UnaryExpression, \ - collate, _is_literal, _literal_as_text, ClauseElement + Null, _const_expr, _clause_element_as_expr, \ + ClauseList, ColumnElement, TextClause, UnaryExpression, \ + collate, _is_literal, _literal_as_text, ClauseElement, and_, or_ from .selectable import SelectBase, Alias, Selectable, ScalarSelect -class _DefaultColumnComparator(operators.ColumnOperators): - """Defines comparison and math operations. 
- - See :class:`.ColumnOperators` and :class:`.Operators` for descriptions - of all operations. - - """ - - @util.memoized_property - def type(self): - return self.expr.type - - def operate(self, op, *other, **kwargs): - o = self.operators[op.__name__] - return o[0](self, self.expr, op, *(other + o[1:]), **kwargs) - - def reverse_operate(self, op, other, **kwargs): - o = self.operators[op.__name__] - return o[0](self, self.expr, op, other, reverse=True, *o[1:], **kwargs) - - def _adapt_expression(self, op, other_comparator): - """evaluate the return type of , - and apply any adaptations to the given operator. - - This method determines the type of a resulting binary expression - given two source types and an operator. For example, two - :class:`.Column` objects, both of the type :class:`.Integer`, will - produce a :class:`.BinaryExpression` that also has the type - :class:`.Integer` when compared via the addition (``+``) operator. - However, using the addition operator with an :class:`.Integer` - and a :class:`.Date` object will produce a :class:`.Date`, assuming - "days delta" behavior by the database (in reality, most databases - other than Postgresql don't accept this particular operation). - - The method returns a tuple of the form , . - The resulting operator and type will be those applied to the - resulting :class:`.BinaryExpression` as the final operator and the - right-hand side of the expression. - - Note that only a subset of operators make usage of - :meth:`._adapt_expression`, - including math operators and user-defined operators, but not - boolean comparison or special SQL keywords like MATCH or BETWEEN. - - """ - return op, other_comparator.type - - def _boolean_compare(self, expr, op, obj, negate=None, reverse=False, - _python_is_types=(util.NoneType, bool), - **kwargs): - - if isinstance(obj, _python_is_types + (Null, True_, False_)): - - # allow x ==/!= True/False to be treated as a literal. - # this comes out to "== / != true/false" or "1/0" if those - # constants aren't supported and works on all platforms - if op in (operators.eq, operators.ne) and \ - isinstance(obj, (bool, True_, False_)): - return BinaryExpression(expr, - _literal_as_text(obj), + +def _boolean_compare(expr, op, obj, negate=None, reverse=False, + _python_is_types=(util.NoneType, bool), + result_type = None, + **kwargs): + + if result_type is None: + result_type = type_api.BOOLEANTYPE + + if isinstance(obj, _python_is_types + (Null, True_, False_)): + + # allow x ==/!= True/False to be treated as a literal. 
+ # this comes out to "== / != true/false" or "1/0" if those + # constants aren't supported and works on all platforms + if op in (operators.eq, operators.ne) and \ + isinstance(obj, (bool, True_, False_)): + return BinaryExpression(expr, + _literal_as_text(obj), + op, + type_=result_type, + negate=negate, modifiers=kwargs) + else: + # all other None/True/False uses IS, IS NOT + if op in (operators.eq, operators.is_): + return BinaryExpression(expr, _const_expr(obj), + operators.is_, + negate=operators.isnot) + elif op in (operators.ne, operators.isnot): + return BinaryExpression(expr, _const_expr(obj), + operators.isnot, + negate=operators.is_) + else: + raise exc.ArgumentError( + "Only '=', '!=', 'is_()', 'isnot()' operators can " + "be used with None/True/False") + else: + obj = _check_literal(expr, op, obj) + + if reverse: + return BinaryExpression(obj, + expr, op, - type_=type_api.BOOLEANTYPE, + type_=result_type, + negate=negate, modifiers=kwargs) + else: + return BinaryExpression(expr, + obj, + op, + type_=result_type, negate=negate, modifiers=kwargs) - else: - # all other None/True/False uses IS, IS NOT - if op in (operators.eq, operators.is_): - return BinaryExpression(expr, _const_expr(obj), - operators.is_, - negate=operators.isnot) - elif op in (operators.ne, operators.isnot): - return BinaryExpression(expr, _const_expr(obj), - operators.isnot, - negate=operators.is_) - else: - raise exc.ArgumentError( - "Only '=', '!=', 'is_()', 'isnot()' operators can " - "be used with None/True/False") - else: - obj = self._check_literal(expr, op, obj) - - if reverse: - return BinaryExpression(obj, - expr, - op, - type_=type_api.BOOLEANTYPE, - negate=negate, modifiers=kwargs) - else: - return BinaryExpression(expr, - obj, - op, - type_=type_api.BOOLEANTYPE, - negate=negate, modifiers=kwargs) - def _binary_operate(self, expr, op, obj, reverse=False, result_type=None, - **kw): - obj = self._check_literal(expr, op, obj) - if reverse: - left, right = obj, expr - else: - left, right = expr, obj - - if result_type is None: - op, result_type = left.comparator._adapt_expression( - op, right.comparator) - - return BinaryExpression(left, right, op, type_=result_type) - - def _scalar(self, expr, op, fn, **kw): - return fn(expr) - - def _in_impl(self, expr, op, seq_or_selectable, negate_op, **kw): - seq_or_selectable = _clause_element_as_expr(seq_or_selectable) - - if isinstance(seq_or_selectable, ScalarSelect): - return self._boolean_compare(expr, op, seq_or_selectable, - negate=negate_op) - elif isinstance(seq_or_selectable, SelectBase): - - # TODO: if we ever want to support (x, y, z) IN (select x, - # y, z from table), we would need a multi-column version of - # as_scalar() to produce a multi- column selectable that - # does not export itself as a FROM clause - - return self._boolean_compare( - expr, op, seq_or_selectable.as_scalar(), - negate=negate_op, **kw) - elif isinstance(seq_or_selectable, (Selectable, TextClause)): - return self._boolean_compare(expr, op, seq_or_selectable, - negate=negate_op, **kw) - elif isinstance(seq_or_selectable, ClauseElement): - raise exc.InvalidRequestError('in_() accepts' +def _binary_operate(expr, op, obj, reverse=False, result_type=None, + **kw): + obj = _check_literal(expr, op, obj) + + if reverse: + left, right = obj, expr + else: + left, right = expr, obj + + if result_type is None: + op, result_type = left.comparator._adapt_expression( + op, right.comparator) + + return BinaryExpression( + left, right, op, type_=result_type, modifiers=kw) + + +def 
_conjunction_operate(expr, op, other, **kw): + if op is operators.and_: + return and_(expr, other) + elif op is operators.or_: + return or_(expr, other) + else: + raise NotImplementedError() + + +def _scalar(expr, op, fn, **kw): + return fn(expr) + + +def _in_impl(expr, op, seq_or_selectable, negate_op, **kw): + seq_or_selectable = _clause_element_as_expr(seq_or_selectable) + + if isinstance(seq_or_selectable, ScalarSelect): + return _boolean_compare(expr, op, seq_or_selectable, + negate=negate_op) + elif isinstance(seq_or_selectable, SelectBase): + + # TODO: if we ever want to support (x, y, z) IN (select x, + # y, z from table), we would need a multi-column version of + # as_scalar() to produce a multi- column selectable that + # does not export itself as a FROM clause + + return _boolean_compare( + expr, op, seq_or_selectable.as_scalar(), + negate=negate_op, **kw) + elif isinstance(seq_or_selectable, (Selectable, TextClause)): + return _boolean_compare(expr, op, seq_or_selectable, + negate=negate_op, **kw) + elif isinstance(seq_or_selectable, ClauseElement): + raise exc.InvalidRequestError( + 'in_() accepts' + ' either a list of expressions ' + 'or a selectable: %r' % seq_or_selectable) + + # Handle non selectable arguments as sequences + args = [] + for o in seq_or_selectable: + if not _is_literal(o): + if not isinstance(o, operators.ColumnOperators): + raise exc.InvalidRequestError( + 'in_() accepts' ' either a list of expressions ' - 'or a selectable: %r' % seq_or_selectable) - - # Handle non selectable arguments as sequences - args = [] - for o in seq_or_selectable: - if not _is_literal(o): - if not isinstance(o, operators.ColumnOperators): - raise exc.InvalidRequestError('in_() accepts' - ' either a list of expressions ' - 'or a selectable: %r' % o) - elif o is None: - o = Null() - else: - o = expr._bind_param(op, o) - args.append(o) - if len(args) == 0: - - # Special case handling for empty IN's, behave like - # comparison against zero row selectable. We use != to - # build the contradiction as it handles NULL values - # appropriately, i.e. "not (x IN ())" should not return NULL - # values for x. - - util.warn('The IN-predicate on "%s" was invoked with an ' - 'empty sequence. This results in a ' - 'contradiction, which nonetheless can be ' - 'expensive to evaluate. Consider alternative ' - 'strategies for improved performance.' 
% expr) - if op is operators.in_op: - return expr != expr - else: - return expr == expr - - return self._boolean_compare(expr, op, - ClauseList(*args).self_group(against=op), - negate=negate_op) - - def _unsupported_impl(self, expr, op, *arg, **kw): - raise NotImplementedError("Operator '%s' is not supported on " - "this expression" % op.__name__) - - def _neg_impl(self, expr, op, **kw): - """See :meth:`.ColumnOperators.__neg__`.""" - return UnaryExpression(expr, operator=operators.neg) - - def _match_impl(self, expr, op, other, **kw): - """See :meth:`.ColumnOperators.match`.""" - return self._boolean_compare(expr, operators.match_op, - self._check_literal(expr, operators.match_op, - other)) - - def _distinct_impl(self, expr, op, **kw): - """See :meth:`.ColumnOperators.distinct`.""" - return UnaryExpression(expr, operator=operators.distinct_op, - type_=expr.type) - - def _between_impl(self, expr, op, cleft, cright, **kw): - """See :meth:`.ColumnOperators.between`.""" - return BinaryExpression( - expr, - ClauseList( - self._check_literal(expr, operators.and_, cleft), - self._check_literal(expr, operators.and_, cright), - operator=operators.and_, - group=False, group_contents=False), - operators.between_op) - - def _collate_impl(self, expr, op, other, **kw): - return collate(expr, other) - - # a mapping of operators with the method they use, along with - # their negated operator for comparison operators - operators = { - "add": (_binary_operate,), - "mul": (_binary_operate,), - "sub": (_binary_operate,), - "div": (_binary_operate,), - "mod": (_binary_operate,), - "truediv": (_binary_operate,), - "custom_op": (_binary_operate,), - "concat_op": (_binary_operate,), - "lt": (_boolean_compare, operators.ge), - "le": (_boolean_compare, operators.gt), - "ne": (_boolean_compare, operators.eq), - "gt": (_boolean_compare, operators.le), - "ge": (_boolean_compare, operators.lt), - "eq": (_boolean_compare, operators.ne), - "like_op": (_boolean_compare, operators.notlike_op), - "ilike_op": (_boolean_compare, operators.notilike_op), - "notlike_op": (_boolean_compare, operators.like_op), - "notilike_op": (_boolean_compare, operators.ilike_op), - "contains_op": (_boolean_compare, operators.notcontains_op), - "startswith_op": (_boolean_compare, operators.notstartswith_op), - "endswith_op": (_boolean_compare, operators.notendswith_op), - "desc_op": (_scalar, UnaryExpression._create_desc), - "asc_op": (_scalar, UnaryExpression._create_asc), - "nullsfirst_op": (_scalar, UnaryExpression._create_nullsfirst), - "nullslast_op": (_scalar, UnaryExpression._create_nullslast), - "in_op": (_in_impl, operators.notin_op), - "notin_op": (_in_impl, operators.in_op), - "is_": (_boolean_compare, operators.is_), - "isnot": (_boolean_compare, operators.isnot), - "collate": (_collate_impl,), - "match_op": (_match_impl,), - "distinct_op": (_distinct_impl,), - "between_op": (_between_impl, ), - "neg": (_neg_impl,), - "getitem": (_unsupported_impl,), - "lshift": (_unsupported_impl,), - "rshift": (_unsupported_impl,), - } - - def _check_literal(self, expr, operator, other): - if isinstance(other, (ColumnElement, TextClause)): - if isinstance(other, BindParameter) and \ - other.type._isnull: - other = other._clone() - other.type = expr.type - return other - elif hasattr(other, '__clause_element__'): - other = other.__clause_element__() - elif isinstance(other, type_api.TypeEngine.Comparator): - other = other.expr - - if isinstance(other, (SelectBase, Alias)): - return other.as_scalar() - elif not isinstance(other, (ColumnElement, 
TextClause)): - return expr._bind_param(operator, other) + 'or a selectable: %r' % o) + elif o is None: + o = Null() + else: + o = expr._bind_param(op, o) + args.append(o) + if len(args) == 0: + + # Special case handling for empty IN's, behave like + # comparison against zero row selectable. We use != to + # build the contradiction as it handles NULL values + # appropriately, i.e. "not (x IN ())" should not return NULL + # values for x. + + util.warn('The IN-predicate on "%s" was invoked with an ' + 'empty sequence. This results in a ' + 'contradiction, which nonetheless can be ' + 'expensive to evaluate. Consider alternative ' + 'strategies for improved performance.' % expr) + if op is operators.in_op: + return expr != expr else: - return other + return expr == expr + + return _boolean_compare(expr, op, + ClauseList(*args).self_group(against=op), + negate=negate_op) + + +def _unsupported_impl(expr, op, *arg, **kw): + raise NotImplementedError("Operator '%s' is not supported on " + "this expression" % op.__name__) + + +def _inv_impl(expr, op, **kw): + """See :meth:`.ColumnOperators.__inv__`.""" + if hasattr(expr, 'negation_clause'): + return expr.negation_clause + else: + return expr._negate() + + +def _neg_impl(expr, op, **kw): + """See :meth:`.ColumnOperators.__neg__`.""" + return UnaryExpression(expr, operator=operators.neg) + + +def _match_impl(expr, op, other, **kw): + """See :meth:`.ColumnOperators.match`.""" + + return _boolean_compare( + expr, operators.match_op, + _check_literal( + expr, operators.match_op, other), + result_type=type_api.MATCHTYPE, + negate=operators.notmatch_op + if op is operators.match_op else operators.match_op, + **kw + ) + + +def _distinct_impl(expr, op, **kw): + """See :meth:`.ColumnOperators.distinct`.""" + return UnaryExpression(expr, operator=operators.distinct_op, + type_=expr.type) + + +def _between_impl(expr, op, cleft, cright, **kw): + """See :meth:`.ColumnOperators.between`.""" + return BinaryExpression( + expr, + ClauseList( + _check_literal(expr, operators.and_, cleft), + _check_literal(expr, operators.and_, cright), + operator=operators.and_, + group=False, group_contents=False), + op, + negate=operators.notbetween_op + if op is operators.between_op + else operators.between_op, + modifiers=kw) + + +def _collate_impl(expr, op, other, **kw): + return collate(expr, other) + +# a mapping of operators with the method they use, along with +# their negated operator for comparison operators +operator_lookup = { + "and_": (_conjunction_operate,), + "or_": (_conjunction_operate,), + "inv": (_inv_impl,), + "add": (_binary_operate,), + "mul": (_binary_operate,), + "sub": (_binary_operate,), + "div": (_binary_operate,), + "mod": (_binary_operate,), + "truediv": (_binary_operate,), + "custom_op": (_binary_operate,), + "concat_op": (_binary_operate,), + "lt": (_boolean_compare, operators.ge), + "le": (_boolean_compare, operators.gt), + "ne": (_boolean_compare, operators.eq), + "gt": (_boolean_compare, operators.le), + "ge": (_boolean_compare, operators.lt), + "eq": (_boolean_compare, operators.ne), + "like_op": (_boolean_compare, operators.notlike_op), + "ilike_op": (_boolean_compare, operators.notilike_op), + "notlike_op": (_boolean_compare, operators.like_op), + "notilike_op": (_boolean_compare, operators.ilike_op), + "contains_op": (_boolean_compare, operators.notcontains_op), + "startswith_op": (_boolean_compare, operators.notstartswith_op), + "endswith_op": (_boolean_compare, operators.notendswith_op), + "desc_op": (_scalar, UnaryExpression._create_desc), 
+ "asc_op": (_scalar, UnaryExpression._create_asc), + "nullsfirst_op": (_scalar, UnaryExpression._create_nullsfirst), + "nullslast_op": (_scalar, UnaryExpression._create_nullslast), + "in_op": (_in_impl, operators.notin_op), + "notin_op": (_in_impl, operators.in_op), + "is_": (_boolean_compare, operators.is_), + "isnot": (_boolean_compare, operators.isnot), + "collate": (_collate_impl,), + "match_op": (_match_impl,), + "notmatch_op": (_match_impl,), + "distinct_op": (_distinct_impl,), + "between_op": (_between_impl, ), + "notbetween_op": (_between_impl, ), + "neg": (_neg_impl,), + "getitem": (_unsupported_impl,), + "lshift": (_unsupported_impl,), + "rshift": (_unsupported_impl,), + "contains": (_unsupported_impl,), +} + + +def _check_literal(expr, operator, other): + if isinstance(other, (ColumnElement, TextClause)): + if isinstance(other, BindParameter) and \ + other.type._isnull: + other = other._clone() + other.type = expr.type + return other + elif hasattr(other, '__clause_element__'): + other = other.__clause_element__() + elif isinstance(other, type_api.TypeEngine.Comparator): + other = other.expr + + if isinstance(other, (SelectBase, Alias)): + return other.as_scalar() + elif not isinstance(other, (ColumnElement, TextClause)): + return expr._bind_param(operator, other) + else: + return other diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index 098f2d5842..7b506f9db9 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -1,5 +1,6 @@ # sql/dml.py -# Copyright (C) 2009-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2009-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,11 +10,13 @@ """ from .base import Executable, _generative, _from_objects, DialectKWArgs -from .elements import ClauseElement, _literal_as_text, Null, and_, _clone +from .elements import ClauseElement, _literal_as_text, Null, and_, _clone, \ + _column_as_key from .selectable import _interpret_as_from, _interpret_as_select, HasPrefixes from .. import util from .. import exc + class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement): """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements. 
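The empty-IN branch added to ``_in_impl`` above can be exercised directly; the following is a minimal sketch using a hypothetical ``users`` table, showing the warning-plus-contradiction behavior rather than asserting exact rendered SQL::

    from sqlalchemy import Column, Integer, MetaData, String, Table

    metadata = MetaData()
    users = Table(
        'users', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)),
    )

    # An IN against an empty sequence emits the warning shown in _in_impl
    # and compiles to a contradiction such as "users.id != users.id",
    # so no rows match while NULL values are still handled correctly.
    print(users.c.id.in_([]))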
@@ -24,6 +27,7 @@ class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement): _execution_options = \ Executable._execution_options.union({'autocommit': True}) _hints = util.immutabledict() + _parameter_ordering = None _prefixes = () def _process_colparams(self, parameters): @@ -36,9 +40,18 @@ def process_single(p): else: return p - if isinstance(parameters, (list, tuple)) and \ - parameters and \ - isinstance(parameters[0], (list, tuple, dict)): + if self._preserve_parameter_order and parameters is not None: + if not isinstance(parameters, list) or \ + (parameters and not isinstance(parameters[0], tuple)): + raise ValueError( + "When preserve_parameter_order is True, " + "values() only accepts a list of 2-tuples") + self._parameter_ordering = [key for key, value in parameters] + + return dict(parameters), False + + if (isinstance(parameters, (list, tuple)) and parameters and + isinstance(parameters[0], (list, tuple, dict))): if not self._supports_multi_parameters: raise exc.InvalidRequestError( @@ -82,7 +95,8 @@ def returning(self, *cols): stmt = table.update().\\ where(table.c.data == 'value').\\ values(status='X').\\ - returning(table.c.server_flag, table.c.updated_timestamp) + returning(table.c.server_flag, + table.c.updated_timestamp) for server_flag, updated_timestamp in connection.execute(stmt): print(server_flag, updated_timestamp) @@ -93,21 +107,20 @@ def returning(self, *cols): objects are typical, the elements can also be expressions:: stmt = table.insert().returning( - (table.c.first_name + " " + table.c.last_name).label('fullname') - ) + (table.c.first_name + " " + table.c.last_name). + label('fullname')) Upon compilation, a RETURNING clause, or database equivalent, will be rendered within the statement. For INSERT and UPDATE, the values are the newly inserted/updated values. For DELETE, the values are those of the rows which were deleted. - Upon execution, the values of the columns to be returned - are made available via the result set and can be iterated - using :meth:`.ResultProxy.fetchone` and similar. For DBAPIs which do not - natively support returning values (i.e. cx_oracle), - SQLAlchemy will approximate this behavior at the result level - so that a reasonable amount of behavioral neutrality is - provided. + Upon execution, the values of the columns to be returned are made + available via the result set and can be iterated using + :meth:`.ResultProxy.fetchone` and similar. For DBAPIs which do not + natively support returning values (i.e. cx_oracle), SQLAlchemy will + approximate this behavior at the result level so that a reasonable + amount of behavioral neutrality is provided. Note that not all databases/DBAPIs support RETURNING. 
For those backends with no support, @@ -128,7 +141,6 @@ def returning(self, *cols): """ self._returning = cols - @_generative def with_hint(self, text, selectable=None, dialect_name="*"): """Add a table hint for a single table to this @@ -166,7 +178,7 @@ def with_hint(self, text, selectable=None, dialect_name="*"): selectable = self.table self._hints = self._hints.union( - {(selectable, dialect_name): text}) + {(selectable, dialect_name): text}) class ValuesBase(UpdateBase): @@ -177,12 +189,13 @@ class ValuesBase(UpdateBase): _supports_multi_parameters = False _has_multi_parameters = False + _preserve_parameter_order = False select = None def __init__(self, table, values, prefixes): self.table = _interpret_as_from(table) self.parameters, self._has_multi_parameters = \ - self._process_colparams(values) + self._process_colparams(values) if prefixes: self._setup_prefixes(prefixes) @@ -193,9 +206,9 @@ def values(self, *args, **kwargs): Note that the :class:`.Insert` and :class:`.Update` constructs support per-execution time formatting of the VALUES and/or SET clauses, - based on the arguments passed to :meth:`.Connection.execute`. However, - the :meth:`.ValuesBase.values` method can be used to "fix" a particular - set of parameters into the statement. + based on the arguments passed to :meth:`.Connection.execute`. + However, the :meth:`.ValuesBase.values` method can be used to "fix" a + particular set of parameters into the statement. Multiple calls to :meth:`.ValuesBase.values` will produce a new construct, each one with the parameter list modified to include @@ -213,23 +226,32 @@ def values(self, *args, **kwargs): users.update().where(users.c.id==5).values(name="some name") - :param \*args: Alternatively, a dictionary, tuple or list - of dictionaries or tuples can be passed as a single positional - argument in order to form the VALUES or - SET clause of the statement. The single dictionary form - works the same as the kwargs form:: + :param \*args: As an alternative to passing key/value parameters, + a dictionary, tuple, or list of dictionaries or tuples can be passed + as a single positional argument in order to form the VALUES or + SET clause of the statement. The forms that are accepted vary + based on whether this is an :class:`.Insert` or an :class:`.Update` + construct. + + For either an :class:`.Insert` or :class:`.Update` construct, a + single dictionary can be passed, which works the same as that of + the kwargs form:: users.insert().values({"name": "some name"}) - If a tuple is passed, the tuple should contain the same number - of columns as the target :class:`.Table`:: + users.update().values({"name": "some new name"}) + + Also for either form but more typically for the :class:`.Insert` + construct, a tuple that contains an entry for every column in the + table is also accepted:: users.insert().values((5, "some name")) - The :class:`.Insert` construct also supports multiply-rendered VALUES - construct, for those backends which support this SQL syntax - (SQLite, Postgresql, MySQL). 
This mode is indicated by passing a list - of one or more dictionaries/tuples:: + The :class:`.Insert` construct also supports being passed a list + of dictionaries or full-table-tuples, which on the server will + render the less common SQL syntax of "multiple values" - this + syntax is supported on backends such as SQLite, Postgresql, MySQL, + but not necessarily others:: users.insert().values([ {"name": "some name"}, @@ -237,37 +259,61 @@ def values(self, *args, **kwargs): {"name": "yet another name"}, ]) - In the case of an :class:`.Update` - construct, only the single dictionary/tuple form is accepted, - else an exception is raised. It is also an exception case to - attempt to mix the single-/multiple- value styles together, - either through multiple :meth:`.ValuesBase.values` calls - or by sending a list + kwargs at the same time. - - .. note:: - - Passing a multiple values list is *not* the same - as passing a multiple values list to the :meth:`.Connection.execute` - method. Passing a list of parameter sets to :meth:`.ValuesBase.values` - produces a construct of this form:: - - INSERT INTO table (col1, col2, col3) VALUES - (col1_0, col2_0, col3_0), - (col1_1, col2_1, col3_1), - ... - - whereas a multiple list passed to :meth:`.Connection.execute` - has the effect of using the DBAPI - `executemany() `_ - method, which provides a high-performance system of invoking - a single-row INSERT statement many times against a series - of parameter sets. The "executemany" style is supported by - all database backends, as it does not depend on a special SQL - syntax. - - .. versionadded:: 0.8 - Support for multiple-VALUES INSERT statements. - + The above form would render a multiple VALUES statement similar to:: + + INSERT INTO users (name) VALUES + (:name_1), + (:name_2), + (:name_3) + + It is essential to note that **passing multiple values is + NOT the same as using traditional executemany() form**. The above + syntax is a **special** syntax not typically used. To emit an + INSERT statement against multiple rows, the normal method is + to pass a multiple values list to the :meth:`.Connection.execute` + method, which is supported by all database backends and is generally + more efficient for a very large number of parameters. + + .. seealso:: + + :ref:`execute_multiple` - an introduction to + the traditional Core method of multiple parameter set + invocation for INSERTs and other statements. + + .. versionchanged:: 1.0.0 an INSERT that uses a multiple-VALUES + clause, even a list of length one, + implies that the :paramref:`.Insert.inline` flag is set to + True, indicating that the statement will not attempt to fetch + the "last inserted primary key" or other defaults. The + statement deals with an arbitrary number of rows, so the + :attr:`.ResultProxy.inserted_primary_key` accessor does not + apply. + + .. versionchanged:: 1.0.0 A multiple-VALUES INSERT now supports + columns with Python side default values and callables in the + same way as that of an "executemany" style of invocation; the + callable is invoked for each row. See :ref:`bug_3288` + for other details. + + The :class:`.Update` construct supports a special form which is a + list of 2-tuples, which when provided must be passed in conjunction + with the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` + parameter. + This form causes the UPDATE statement to render the SET clauses + using the order of parameters given to :meth:`.Update.values`, rather + than the ordering of columns given in the :class:`.Table`. 
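A short sketch of the parameter-ordered UPDATE form described above, using a hypothetical ``accounts`` table: ``values()`` receives a list of 2-tuples and the rendered SET clause keeps that order instead of the :class:`.Table` column order::

    from sqlalchemy import Column, Integer, MetaData, String, Table

    metadata = MetaData()
    accounts = Table(
        'accounts', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)),
        Column('balance', Integer),
    )

    stmt = (
        accounts.update(preserve_parameter_order=True)
        .where(accounts.c.id == 5)
        .values([(accounts.c.balance, 0), (accounts.c.name, 'closed')])
    )
    # SET balance=..., name=... is emitted in the order given to values().
    print(stmt)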
+ + .. versionadded:: 1.0.10 - added support for parameter-ordered + UPDATE statements via the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` + flag. + + .. seealso:: + + :ref:`updates_order_parameters` - full example of the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` + flag .. seealso:: @@ -281,23 +327,23 @@ def values(self, *args, **kwargs): """ if self.select is not None: raise exc.InvalidRequestError( - "This construct already inserts from a SELECT") + "This construct already inserts from a SELECT") if self._has_multi_parameters and kwargs: raise exc.InvalidRequestError( - "This construct already has multiple parameter sets.") + "This construct already has multiple parameter sets.") if args: if len(args) > 1: raise exc.ArgumentError( - "Only a single dictionary/tuple or list of " - "dictionaries/tuples is accepted positionally.") + "Only a single dictionary/tuple or list of " + "dictionaries/tuples is accepted positionally.") v = args[0] else: v = {} if self.parameters is None: self.parameters, self._has_multi_parameters = \ - self._process_colparams(v) + self._process_colparams(v) else: if self._has_multi_parameters: self.parameters = list(self.parameters) @@ -320,8 +366,8 @@ def values(self, *args, **kwargs): if kwargs: if self._has_multi_parameters: raise exc.ArgumentError( - "Can't pass kwargs and multiple parameter sets " - "simultaenously") + "Can't pass kwargs and multiple parameter sets " + "simultaenously") else: self.parameters.update(kwargs) @@ -339,47 +385,47 @@ def return_defaults(self, *cols): server_created_at = result.returned_defaults['created_at'] When used against a backend that supports RETURNING, all column - values generated by SQL expression or server-side-default will be added - to any existing RETURNING clause, provided that - :meth:`.UpdateBase.returning` is not used simultaneously. The column values - will then be available on the result using the - :attr:`.ResultProxy.returned_defaults` accessor as a - dictionary, referring to values keyed to the :class:`.Column` object - as well as its ``.key``. + values generated by SQL expression or server-side-default will be + added to any existing RETURNING clause, provided that + :meth:`.UpdateBase.returning` is not used simultaneously. The column + values will then be available on the result using the + :attr:`.ResultProxy.returned_defaults` accessor as a dictionary, + referring to values keyed to the :class:`.Column` object as well as + its ``.key``. This method differs from :meth:`.UpdateBase.returning` in these ways: 1. :meth:`.ValuesBase.return_defaults` is only intended for use with an INSERT or an UPDATE statement that matches exactly one row. - While the RETURNING construct in the general sense supports multiple - rows for a multi-row UPDATE or DELETE statement, or for special - cases of INSERT that return multiple rows (e.g. INSERT from SELECT, - multi-valued VALUES clause), :meth:`.ValuesBase.return_defaults` - is intended only - for an "ORM-style" single-row INSERT/UPDATE statement. The row - returned by the statement is also consumed implcitly when + While the RETURNING construct in the general sense supports + multiple rows for a multi-row UPDATE or DELETE statement, or for + special cases of INSERT that return multiple rows (e.g. INSERT from + SELECT, multi-valued VALUES clause), + :meth:`.ValuesBase.return_defaults` is intended only for an + "ORM-style" single-row INSERT/UPDATE statement. 
The row returned + by the statement is also consumed implicitly when :meth:`.ValuesBase.return_defaults` is used. By contrast, - :meth:`.UpdateBase.returning` leaves the RETURNING result-set intact - with a collection of any number of rows. + :meth:`.UpdateBase.returning` leaves the RETURNING result-set + intact with a collection of any number of rows. 2. It is compatible with the existing logic to fetch auto-generated - primary key values, also known as "implicit returning". Backends that - support RETURNING will automatically make use of RETURNING in order - to fetch the value of newly generated primary keys; while the + primary key values, also known as "implicit returning". Backends + that support RETURNING will automatically make use of RETURNING in + order to fetch the value of newly generated primary keys; while the :meth:`.UpdateBase.returning` method circumvents this behavior, :meth:`.ValuesBase.return_defaults` leaves it intact. 3. It can be called against any backend. Backends that don't support RETURNING will skip the usage of the feature, rather than raising - an exception. The return value of :attr:`.ResultProxy.returned_defaults` - will be ``None`` + an exception. The return value of + :attr:`.ResultProxy.returned_defaults` will be ``None`` :meth:`.ValuesBase.return_defaults` is used by the ORM to provide an efficient implementation for the ``eager_defaults`` feature of :func:`.mapper`. :param cols: optional list of column key names or :class:`.Column` - objects. If omitted, all column expressions evaulated on the server + objects. If omitted, all column expressions evaluated on the server are added to the returning list. .. versionadded:: 0.9.0 @@ -410,21 +456,22 @@ class Insert(ValuesBase): _supports_multi_parameters = True def __init__(self, - table, - values=None, - inline=False, - bind=None, - prefixes=None, - returning=None, - return_defaults=False, - **dialect_kw): + table, + values=None, + inline=False, + bind=None, + prefixes=None, + returning=None, + return_defaults=False, + **dialect_kw): """Construct an :class:`.Insert` object. Similar functionality is available via the :meth:`~.TableClause.insert` method on :class:`~.schema.Table`. - :param table: :class:`.TableClause` which is the subject of the insert. + :param table: :class:`.TableClause` which is the subject of the + insert. :param values: collection of values to be inserted; see :meth:`.Insert.values` for a description of allowed formats here. @@ -432,15 +479,21 @@ def __init__(self, dynamically render the VALUES clause at execution time based on the parameters passed to :meth:`.Connection.execute`. - :param inline: if True, SQL defaults will be compiled 'inline' into the - statement and not pre-executed. + :param inline: if True, no attempt will be made to retrieve the + SQL-generated default values to be provided within the statement; + in particular, + this allows SQL expressions to be rendered 'inline' within the + statement without the need to pre-execute them beforehand; for + backends that support "returning", this turns off the "implicit + returning" feature for the statement. If both `values` and compile-time bind parameters are present, the compile-time bind parameters override the information specified within `values` on a per-key basis. - The keys within `values` can be either :class:`~sqlalchemy.schema.Column` - objects or their string identifiers. Each key may reference one of: + The keys within `values` can be either + :class:`~sqlalchemy.schema.Column` objects or their string + identifiers. 
Each key may reference one of: * a literal data value (i.e. string, number, etc.); * a Column object; @@ -460,6 +513,7 @@ def __init__(self, ValuesBase.__init__(self, table, values, prefixes) self._bind = bind self.select = self.select_names = None + self.include_insert_from_select_defaults = False self.inline = inline self._returning = returning self._validate_dialect_kwargs(dialect_kw) @@ -472,7 +526,7 @@ def get_children(self, **kwargs): return () @_generative - def from_select(self, names, select): + def from_select(self, names, select, include_defaults=True): """Return a new :class:`.Insert` construct which represents an ``INSERT...FROM SELECT`` statement. @@ -491,36 +545,43 @@ def from_select(self, names, select): is not checked before passing along to the database, the database would normally raise an exception if these column lists don't correspond. - - .. note:: - - Depending on backend, it may be necessary for the :class:`.Insert` - statement to be constructed using the ``inline=True`` flag; this - flag will prevent the implicit usage of ``RETURNING`` when the - ``INSERT`` statement is rendered, which isn't supported on a backend - such as Oracle in conjunction with an ``INSERT..SELECT`` combination:: - - sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5) - ins = table2.insert(inline=True).from_select(['a', 'b'], sel) - - .. note:: - - A SELECT..INSERT construct in SQL has no VALUES clause. Therefore - :class:`.Column` objects which utilize Python-side defaults - (e.g. as described at :ref:`metadata_defaults_toplevel`) - will **not** take effect when using :meth:`.Insert.from_select`. + :param include_defaults: if True, non-server default values and + SQL expressions as specified on :class:`.Column` objects + (as documented in :ref:`metadata_defaults_toplevel`) not + otherwise specified in the list of names will be rendered + into the INSERT and SELECT statements, so that these values are also + included in the data to be inserted. + + .. note:: A Python-side default that uses a Python callable function + will only be invoked **once** for the whole statement, and **not + per row**. + + .. versionadded:: 1.0.0 - :meth:`.Insert.from_select` now renders + Python-side and SQL expression column defaults into the + SELECT statement for columns otherwise not included in the + list of column names. + + .. versionchanged:: 1.0.0 an INSERT that uses FROM SELECT + implies that the :paramref:`.insert.inline` flag is set to + True, indicating that the statement will not attempt to fetch + the "last inserted primary key" or other defaults. The statement + deals with an arbitrary number of rows, so the + :attr:`.ResultProxy.inserted_primary_key` accessor does not apply. .. versionadded:: 0.8.3 """ if self.parameters: raise exc.InvalidRequestError( - "This construct already inserts value expressions") + "This construct already inserts value expressions") self.parameters, self._has_multi_parameters = \ - self._process_colparams(dict((n, Null()) for n in names)) + self._process_colparams( + dict((_column_as_key(n), Null()) for n in names)) self.select_names = names + self.inline = True + self.include_insert_from_select_defaults = include_defaults self.select = _interpret_as_select(select) def _copy_internals(self, clone=_clone, **kw): @@ -533,21 +594,23 @@ def _copy_internals(self, clone=_clone, **kw): class Update(ValuesBase): """Represent an Update construct. - The :class:`.Update` object is created using the :func:`update()` function. 
+ The :class:`.Update` object is created using the :func:`update()` + function. """ __visit_name__ = 'update' def __init__(self, - table, - whereclause=None, - values=None, - inline=False, - bind=None, - prefixes=None, - returning=None, - return_defaults=False, - **dialect_kw): + table, + whereclause=None, + values=None, + inline=False, + bind=None, + prefixes=None, + returning=None, + return_defaults=False, + preserve_parameter_order=False, + **dialect_kw): """Construct an :class:`.Update` object. E.g.:: @@ -609,6 +672,19 @@ def __init__(self, be available in the dictionary returned from :meth:`.ResultProxy.last_updated_params`. + :param preserve_parameter_order: if True, the update statement is + expected to receive parameters **only** via the :meth:`.Update.values` + method, and they must be passed as a Python ``list`` of 2-tuples. + The rendered UPDATE statement will emit the SET clause for each + referenced column maintaining this order. + + .. versionadded:: 1.0.10 + + .. seealso:: + + :ref:`updates_order_parameters` - full example of the + :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` flag + If both ``values`` and compile-time bind parameters are present, the compile-time bind parameters override the information specified within ``values`` on a per-key basis. @@ -650,6 +726,7 @@ def __init__(self, """ + self._preserve_parameter_order = preserve_parameter_order ValuesBase.__init__(self, table, values, prefixes) self._bind = bind self._returning = returning @@ -661,7 +738,6 @@ def __init__(self, self._validate_dialect_kwargs(dialect_kw) self._return_defaults = return_defaults - def get_children(self, **kwargs): if self._whereclause is not None: return self._whereclause, @@ -681,7 +757,7 @@ def where(self, whereclause): """ if self._whereclause is not None: self._whereclause = and_(self._whereclause, - _literal_as_text(whereclause)) + _literal_as_text(whereclause)) else: self._whereclause = _literal_as_text(whereclause) @@ -704,29 +780,30 @@ def _extra_froms(self): class Delete(UpdateBase): """Represent a DELETE construct. - The :class:`.Delete` object is created using the :func:`delete()` function. + The :class:`.Delete` object is created using the :func:`delete()` + function. """ __visit_name__ = 'delete' def __init__(self, - table, - whereclause=None, - bind=None, - returning=None, - prefixes=None, - **dialect_kw): + table, + whereclause=None, + bind=None, + returning=None, + prefixes=None, + **dialect_kw): """Construct :class:`.Delete` object. Similar functionality is available via the :meth:`~.TableClause.delete` method on :class:`~.schema.Table`. - :param table: The table to be updated. + :param table: The table to delete rows from. :param whereclause: A :class:`.ClauseElement` describing the ``WHERE`` - condition of the ``UPDATE`` statement. Note that the + condition of the ``DELETE`` statement. Note that the :meth:`~Delete.where()` generative method may be used instead. .. 
seealso:: @@ -760,11 +837,10 @@ def where(self, whereclause): if self._whereclause is not None: self._whereclause = and_(self._whereclause, - _literal_as_text(whereclause)) + _literal_as_text(whereclause)) else: self._whereclause = _literal_as_text(whereclause) def _copy_internals(self, clone=_clone, **kw): # TODO: coverage self._whereclause = clone(self._whereclause, **kw) - diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index c230fb0d32..c564777b45 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -1,5 +1,6 @@ # sql/elements.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -18,14 +19,17 @@ from .annotation import Annotated import itertools from .base import Executable, PARSE_AUTOCOMMIT, Immutable, NO_ARG -from .base import _generative, Generative +from .base import _generative +import numbers import re import operator + def _clone(element, **kw): return element._clone() + def collate(expression, collation): """Return the clause ``expression COLLATE collation``. @@ -45,7 +49,8 @@ def collate(expression, collation): _literal_as_text(collation), operators.collate, type_=expr.type) -def between(expr, lower_bound, upper_bound): + +def between(expr, lower_bound, upper_bound, symmetric=False): """Produce a ``BETWEEN`` predicate clause. E.g.:: @@ -79,29 +84,34 @@ def between(expr, lower_bound, upper_bound): into a column expression, serving as the left side of the ``BETWEEN`` expression. - :param lower_bound: a column or Python scalar expression serving as the lower - bound of the right side of the ``BETWEEN`` expression. + :param lower_bound: a column or Python scalar expression serving as the + lower bound of the right side of the ``BETWEEN`` expression. :param upper_bound: a column or Python scalar expression serving as the upper bound of the right side of the ``BETWEEN`` expression. + :param symmetric: if True, will render " BETWEEN SYMMETRIC ". Note + that not all databases support this syntax. + + .. versionadded:: 0.9.5 + .. seealso:: :meth:`.ColumnElement.between` """ expr = _literal_as_binds(expr) - return expr.between(lower_bound, upper_bound) + return expr.between(lower_bound, upper_bound, symmetric=symmetric) + def literal(value, type_=None): """Return a literal clause, bound to a bind parameter. - Literal clauses are created automatically when non- :class:`.ClauseElement` - objects (such as strings, ints, dates, etc.) are used in a comparison - operation with a :class:`.ColumnElement` - subclass, such as a :class:`~sqlalchemy.schema.Column` object. - Use this function to force the - generation of a literal clause, which will be created as a + Literal clauses are created automatically when non- + :class:`.ClauseElement` objects (such as strings, ints, dates, etc.) are + used in a comparison operation with a :class:`.ColumnElement` subclass, + such as a :class:`~sqlalchemy.schema.Column` object. Use this function + to force the generation of a literal clause, which will be created as a :class:`BindParameter` with a bound value. :param value: the value to be bound. 
Can be any Python object supported by @@ -114,7 +124,6 @@ def literal(value, type_=None): return BindParameter(None, value, type_=type_, unique=True) - def type_coerce(expression, type_): """Associate a SQL expression with a particular type, without rendering ``CAST``. @@ -138,7 +147,7 @@ def type_coerce(expression, type_): passed to :func:`.type_coerce` as targets. For example, if a type implements the :meth:`.TypeEngine.bind_expression` method or :meth:`.TypeEngine.bind_processor` method or equivalent, - these functions will take effect at statement compliation/execution time + these functions will take effect at statement compilation/execution time when a literal value is passed, as in:: # bound-value handling of MyStringType will be applied to the @@ -149,11 +158,12 @@ def type_coerce(expression, type_): except that it does not render the ``CAST`` expression in the resulting statement. - :param expression: A SQL expression, such as a :class:`.ColumnElement` expression - or a Python string which will be coerced into a bound literal value. + :param expression: A SQL expression, such as a :class:`.ColumnElement` + expression or a Python string which will be coerced into a bound literal + value. :param type_: A :class:`.TypeEngine` class or instance indicating - the type to which the the expression is coerced. + the type to which the expression is coerced. .. seealso:: @@ -177,9 +187,6 @@ def type_coerce(expression, type_): return Label(None, expression, type_=type_) - - - def outparam(key, type_=None): """Create an 'OUT' parameter for usage in functions (stored procedures), for databases which support them. @@ -191,9 +198,7 @@ def outparam(key, type_=None): """ return BindParameter( - key, None, type_=type_, unique=False, isoutparam=True) - - + key, None, type_=type_, unique=False, isoutparam=True) def not_(clause): @@ -207,7 +212,6 @@ def not_(clause): return operators.inv(_literal_as_binds(clause)) - @inspection._self_inspects class ClauseElement(Visitable): """Base class for elements of a programmatically constructed SQL @@ -224,7 +228,9 @@ class ClauseElement(Visitable): is_selectable = False is_clause_element = True + description = None _order_by_label_element = None + _is_from_container = False def _clone(self): """Create a shallow copy of this ClauseElement. @@ -444,8 +450,8 @@ def compile(self, default, bind=None, dialect=None, **kw): :param dialect: A ``Dialect`` instance from which a ``Compiled`` will be acquired. This argument takes precedence over the `bind` - argument as well as this :class:`.ClauseElement`'s bound engine, if - any. + argument as well as this :class:`.ClauseElement`'s bound engine, + if any. :param inline: Used for INSERT statements, for a dialect which does not support inline retrieval of newly generated primary key @@ -455,6 +461,26 @@ def compile(self, default, bind=None, dialect=None, **kw): also refer to any server-side default generation function associated with a primary key `Column`. + :param compile_kwargs: optional dictionary of additional parameters + that will be passed through to the compiler within all "visit" + methods. This allows any custom flag to be passed through to + a custom compilation construct, for example. It is also used + for the case of passing the ``literal_binds`` flag through:: + + from sqlalchemy.sql import table, column, select + + t = table('t', column('x')) + + s = select([t]).where(t.c.x == 5) + + print s.compile(compile_kwargs={"literal_binds": True}) + + .. versionadded:: 0.9.0 + + .. 
seealso:: + + :ref:`faq_sql_expression_string` + """ if not dialect: @@ -480,9 +506,21 @@ def __str__(self): return unicode(self.compile()).encode('ascii', 'backslashreplace') def __and__(self, other): + """'and' at the ClauseElement level. + + .. deprecated:: 0.9.5 - conjunctions are intended to be + at the :class:`.ColumnElement`. level + + """ return and_(self, other) def __or__(self, other): + """'or' at the ClauseElement level. + + .. deprecated:: 0.9.5 - conjunctions are intended to be + at the :class:`.ColumnElement`. level + + """ return or_(self, other) def __invert__(self): @@ -491,19 +529,19 @@ def __invert__(self): else: return self._negate() + def _negate(self): + return UnaryExpression( + self.self_group(against=operators.inv), + operator=operators.inv, + negate=None) + def __bool__(self): raise TypeError("Boolean value of this clause is not defined") __nonzero__ = __bool__ - def _negate(self): - return UnaryExpression( - self.self_group(against=operators.inv), - operator=operators.inv, - negate=None) - def __repr__(self): - friendly = getattr(self, 'description', None) + friendly = self.description if friendly is None: return object.__repr__(self) else: @@ -511,8 +549,7 @@ def __repr__(self): self.__module__, self.__class__.__name__, id(self), friendly) - -class ColumnElement(ClauseElement, operators.ColumnOperators): +class ColumnElement(operators.ColumnOperators, ClauseElement): """Represent a column-oriented SQL expression suitable for usage in the "columns" clause, WHERE clause etc. of a statement. @@ -523,40 +560,42 @@ class ColumnElement(ClauseElement, operators.ColumnOperators): literal expressions, keywords such as ``NULL``, etc. :class:`.ColumnElement` is the ultimate base class for all such elements. - A wide variety of SQLAlchemy Core functions work at the SQL expression level, - and are intended to accept instances of :class:`.ColumnElement` as arguments. - These functions will typically document that they accept a "SQL expression" - as an argument. What this means in terms of SQLAlchemy usually refers - to an input which is either already in the form of a :class:`.ColumnElement` - object, or a value which can be **coerced** into one. The coercion - rules followed by most, but not all, SQLAlchemy Core functions with regards - to SQL expressions are as follows: + A wide variety of SQLAlchemy Core functions work at the SQL expression + level, and are intended to accept instances of :class:`.ColumnElement` as + arguments. These functions will typically document that they accept a + "SQL expression" as an argument. What this means in terms of SQLAlchemy + usually refers to an input which is either already in the form of a + :class:`.ColumnElement` object, or a value which can be **coerced** into + one. The coercion rules followed by most, but not all, SQLAlchemy Core + functions with regards to SQL expressions are as follows: * a literal Python value, such as a string, integer or floating point value, boolean, datetime, ``Decimal`` object, or virtually - any other Python object, will be coerced into a "literal bound value". - This generally means that a :func:`.bindparam` will be produced - featuring the given value embedded into the construct; the resulting - :class:`.BindParameter` object is an instance of :class:`.ColumnElement`. - The Python value will ultimately be sent to the DBAPI at execution time as a - paramterized argument to the ``execute()`` or ``executemany()`` methods, - after SQLAlchemy type-specific converters (e.g. 
those provided by - any associated :class:`.TypeEngine` objects) are applied to the value. - - * any special object value, typically ORM-level constructs, which feature - a method called ``__clause_element__()``. The Core expression system - looks for this method when an object of otherwise unknown type is passed - to a function that is looking to coerce the argument into a :class:`.ColumnElement` - expression. The ``__clause_element__()`` method, if present, should - return a :class:`.ColumnElement` instance. The primary use of - ``__clause_element__()`` within SQLAlchemy is that of class-bound attributes - on ORM-mapped classes; a ``User`` class which contains a mapped attribute - named ``.name`` will have a method ``User.name.__clause_element__()`` - which when invoked returns the :class:`.Column` called ``name`` associated - with the mapped table. - - * The Python ``None`` value is typically interpreted as ``NULL``, which - in SQLAlchemy Core produces an instance of :func:`.null`. + any other Python object, will be coerced into a "literal bound + value". This generally means that a :func:`.bindparam` will be + produced featuring the given value embedded into the construct; the + resulting :class:`.BindParameter` object is an instance of + :class:`.ColumnElement`. The Python value will ultimately be sent + to the DBAPI at execution time as a paramterized argument to the + ``execute()`` or ``executemany()`` methods, after SQLAlchemy + type-specific converters (e.g. those provided by any associated + :class:`.TypeEngine` objects) are applied to the value. + + * any special object value, typically ORM-level constructs, which + feature a method called ``__clause_element__()``. The Core + expression system looks for this method when an object of otherwise + unknown type is passed to a function that is looking to coerce the + argument into a :class:`.ColumnElement` expression. The + ``__clause_element__()`` method, if present, should return a + :class:`.ColumnElement` instance. The primary use of + ``__clause_element__()`` within SQLAlchemy is that of class-bound + attributes on ORM-mapped classes; a ``User`` class which contains a + mapped attribute named ``.name`` will have a method + ``User.name.__clause_element__()`` which when invoked returns the + :class:`.Column` called ``name`` associated with the mapped table. + + * The Python ``None`` value is typically interpreted as ``NULL``, + which in SQLAlchemy Core produces an instance of :func:`.null`. A :class:`.ColumnElement` provides the ability to generate new :class:`.ColumnElement` @@ -587,19 +626,88 @@ class ColumnElement(ClauseElement, operators.ColumnOperators): __visit_name__ = 'column' primary_key = False foreign_keys = [] + _label = None - _key_label = key = None + """The named label that can be used to target + this column in a result set. + + This label is almost always the label used when + rendering AS AS "; typically columns that don't have + any parent table and are named the same as what the label would be + in any case. + + """ + + _resolve_label = None + """The name that should be used to identify this ColumnElement in a + select() object when "label resolution" logic is used; this refers + to using a string name in an expression like order_by() or group_by() + that wishes to target a labeled expression in the columns clause. 
+ + The name is distinct from that of .name or ._label to account for the case + where anonymizing logic may be used to change the name that's actually + rendered at compile time; this attribute should hold onto the original + name that was user-assigned when producing a .label() construct. + + """ + + _allow_label_resolve = True + """A flag that can be flipped to prevent a column from being resolvable + by string label name.""" + _alt_names = () def self_group(self, against=None): - if against in (operators.and_, operators.or_, operators._asbool) and \ - self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity: + if (against in (operators.and_, operators.or_, operators._asbool) and + self.type._type_affinity + is type_api.BOOLEANTYPE._type_affinity): return AsBoolean(self, operators.istrue, operators.isfalse) else: return self def _negate(self): if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity: + # TODO: see the note in AsBoolean that it seems to assume + # the element is the True_() / False_() constant, so this + # is too broad return AsBoolean(self, operators.isfalse, operators.istrue) else: return super(ColumnElement, self)._negate() @@ -610,14 +718,21 @@ def type(self): @util.memoized_property def comparator(self): - return self.type.comparator_factory(self) + try: + comparator_factory = self.type.comparator_factory + except AttributeError: + raise TypeError( + "Object %r associated with '.type' attribute " + "is not a TypeEngine class or object" % self.type) + else: + return comparator_factory(self) def __getattr__(self, key): try: return getattr(self.comparator, key) except AttributeError: raise AttributeError( - 'Neither %r object nor %r object has an attribute %r' % ( + 'Neither %r object nor %r object has an attribute %r' % ( type(self).__name__, type(self.comparator).__name__, key) @@ -631,8 +746,8 @@ def reverse_operate(self, op, other, **kwargs): def _bind_param(self, operator, obj): return BindParameter(None, obj, - _compared_to_operator=operator, - _compared_to_type=self.type, unique=True) + _compared_to_operator=operator, + _compared_to_type=self.type, unique=True) @property def expression(self): @@ -650,7 +765,7 @@ def _select_iterable(self): @util.memoized_property def base_columns(self): return util.column_set(c for c in self.proxy_set - if not hasattr(c, '_proxies')) + if not hasattr(c, '_proxies')) @util.memoized_property def proxy_set(self): @@ -671,9 +786,10 @@ def _compare_name_for_result(self, other): when targeting within a result row.""" return hasattr(other, 'name') and hasattr(self, 'name') and \ - other.name == self.name + other.name == self.name - def _make_proxy(self, selectable, name=None, name_is_truncatable=False, **kw): + def _make_proxy( + self, selectable, name=None, name_is_truncatable=False, **kw): """Create a new :class:`.ColumnElement` representing this :class:`.ColumnElement` as it appears in the select list of a descending selectable. 
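The ``_resolve_label`` / ``_allow_label_resolve`` attributes documented above back the user-facing "label resolution" behavior, where a plain string passed to ``order_by()`` or ``group_by()`` is matched against a labeled expression in the columns clause. A minimal sketch with a hypothetical ``users`` table::

    from sqlalchemy import Column, Integer, MetaData, String, Table, select

    metadata = MetaData()
    users = Table(
        'users', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)),
    )

    # 'user_name' is resolved to the labeled column expression rather than
    # being treated as a raw identifier.
    stmt = select([users.c.name.label('user_name')]).order_by('user_name')
    print(stmt)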
@@ -692,10 +808,10 @@ def _make_proxy(self, selectable, name=None, name_is_truncatable=False, **kw): else: key = name co = ColumnClause( - _as_truncated(name) if name_is_truncatable else name, - type_=getattr(self, 'type', None), - _selectable=selectable - ) + _as_truncated(name) if name_is_truncatable else name, + type_=getattr(self, 'type', None), + _selectable=selectable + ) co._proxies = [self] if selectable._is_clone_of is not None: co._is_clone_of = \ @@ -713,8 +829,8 @@ def compare(self, other, use_proxies=False, equivalents=None, **kw): :param equivalents: a dictionary of columns as keys mapped to sets of columns. If the given "other" column is present in this - dictionary, if any of the columns in the corresponding set() pass the - comparison test, the result is True. This is used to expand the + dictionary, if any of the columns in the corresponding set() pass + the comparison test, the result is True. This is used to expand the comparison to other columns that may be known to be equivalent to this one via foreign key or other criterion. @@ -731,6 +847,16 @@ def compare(self, other, use_proxies=False, equivalents=None, **kw): else: return False + def cast(self, type_): + """Produce a type cast, i.e. ``CAST( AS )``. + + This is a shortcut to the :func:`~.expression.cast` function. + + .. versionadded:: 1.0.7 + + """ + return Cast(self, type_) + def label(self, name): """Produce a column label, i.e. `` AS ``. @@ -755,8 +881,12 @@ def anon_label(self): expressions and function calls. """ - return _anonymous_label('%%(%d %s)s' % (id(self), getattr(self, - 'name', 'anon'))) + while self._is_clone_of is not None: + self = self._is_clone_of + + return _anonymous_label( + '%%(%d %s)s' % (id(self), getattr(self, 'name', 'anon')) + ) class BindParameter(ColumnElement): @@ -784,18 +914,18 @@ class BindParameter(ColumnElement): _is_crud = False def __init__(self, key, value=NO_ARG, type_=None, - unique=False, required=NO_ARG, - quote=None, callable_=None, - isoutparam=False, - _compared_to_operator=None, - _compared_to_type=None): + unique=False, required=NO_ARG, + quote=None, callable_=None, + isoutparam=False, + _compared_to_operator=None, + _compared_to_type=None): """Produce a "bound expression". The return value is an instance of :class:`.BindParameter`; this is a :class:`.ColumnElement` subclass which represents a so-called - "placeholder" value in a SQL expression, the value of which is supplied - at the point at which the statement in executed against a database - connection. + "placeholder" value in a SQL expression, the value of which is + supplied at the point at which the statement in executed against a + database connection. In SQLAlchemy, the :func:`.bindparam` construct has the ability to carry along the actual value that will be ultimately @@ -831,27 +961,29 @@ def __init__(self, key, value=NO_ARG, type_=None, where the WHERE criterion of the statement is to change on each invocation, such as:: - stmt = users_table.update().\\ - where(user_table.c.name == bindparam('username')).\\ - values(fullname=bindparam('fullname')) + stmt = (users_table.update(). + where(user_table.c.name == bindparam('username')). 
+ values(fullname=bindparam('fullname')) + ) - connection.execute(stmt, [ - {"username": "wendy", "fullname": "Wendy Smith"}, - {"username": "jack", "fullname": "Jack Jones"}, - ]) + connection.execute( + stmt, [{"username": "wendy", "fullname": "Wendy Smith"}, + {"username": "jack", "fullname": "Jack Jones"}, + ] + ) - SQLAlchemy's Core expression system makes wide use of :func:`.bindparam` - in an implicit sense. It is typical that Python literal values passed to - virtually all SQL expression functions are coerced into fixed - :func:`.bindparam` constructs. For example, given a comparison operation - such as:: + SQLAlchemy's Core expression system makes wide use of + :func:`.bindparam` in an implicit sense. It is typical that Python + literal values passed to virtually all SQL expression functions are + coerced into fixed :func:`.bindparam` constructs. For example, given + a comparison operation such as:: expr = users_table.c.name == 'Wendy' The above expression will produce a :class:`.BinaryExpression` - contruct, where the left side is the :class:`.Column` object - representing the ``name`` column, and the right side is a :class:`.BindParameter` - representing the literal value:: + construct, where the left side is the :class:`.Column` object + representing the ``name`` column, and the right side is a + :class:`.BindParameter` representing the literal value:: print(repr(expr.right)) BindParameter('%(4327771088 name)s', 'Wendy', type_=String()) @@ -882,8 +1014,8 @@ def __init__(self, key, value=NO_ARG, type_=None, Similarly, :func:`.bindparam` is invoked automatically when working with :term:`CRUD` statements as far as the "VALUES" portion is concerned. The :func:`.insert` construct produces an - ``INSERT`` expression which will, at statement execution time, generate - bound placeholders based on the arguments passed, as in:: + ``INSERT`` expression which will, at statement execution time, + generate bound placeholders based on the arguments passed, as in:: stmt = users_table.insert() result = connection.execute(stmt, name='Wendy') @@ -950,8 +1082,8 @@ def __init__(self, key, value=NO_ARG, type_=None, If ``True``, a value is required at execution time. If not passed, it defaults to ``True`` if neither :paramref:`.bindparam.value` or :paramref:`.bindparam.callable` were passed. If either of these - parameters are present, then :paramref:`.bindparam.required` defaults - to ``False``. + parameters are present, then :paramref:`.bindparam.required` + defaults to ``False``. .. 
versionchanged:: 0.8 If the ``required`` flag is not specified, it will be set automatically to ``True`` or ``False`` depending @@ -980,7 +1112,7 @@ def __init__(self, key, value=NO_ARG, type_=None, """ if isinstance(key, ColumnClause): type_ = key.type - key = key.name + key = key.key if required is NO_ARG: required = (value is NO_ARG and callable_ is None) if value is NO_ARG: @@ -991,10 +1123,10 @@ def __init__(self, key, value=NO_ARG, type_=None, if unique: self.key = _anonymous_label('%%(%d %s)s' % (id(self), key - or 'param')) + or 'param')) else: self.key = key or _anonymous_label('%%(%d param)s' - % id(self)) + % id(self)) # identifying key that won't change across # clones, used to identify the bind's logical @@ -1017,21 +1149,23 @@ def __init__(self, key, value=NO_ARG, type_=None, _compared_to_operator, value) else: self.type = type_api._type_map.get(type(value), - type_api.NULLTYPE) + type_api.NULLTYPE) elif isinstance(type_, type): self.type = type_() else: self.type = type_ def _with_value(self, value): - """Return a copy of this :class:`.BindParameter` with the given value set.""" + """Return a copy of this :class:`.BindParameter` with the given value + set. + """ cloned = self._clone() cloned.value = value cloned.callable = None cloned.required = False if cloned.type is type_api.NULLTYPE: cloned.type = type_api._type_map.get(type(value), - type_api.NULLTYPE) + type_api.NULLTYPE) return cloned @property @@ -1053,14 +1187,14 @@ def _clone(self): c = ClauseElement._clone(self) if self.unique: c.key = _anonymous_label('%%(%d %s)s' % (id(c), c._orig_key - or 'param')) + or 'param')) return c def _convert_to_unique(self): if not self.unique: self.unique = True - self.key = _anonymous_label('%%(%d %s)s' % (id(self), - self._orig_key or 'param')) + self.key = _anonymous_label( + '%%(%d %s)s' % (id(self), self._orig_key or 'param')) def compare(self, other, **kw): """Compare this :class:`BindParameter` to the given @@ -1083,7 +1217,7 @@ def __getstate__(self): def __repr__(self): return 'BindParameter(%r, %r, type_=%r)' % (self.key, - self.value, self.type) + self.value, self.type) class TypeClause(ClauseElement): @@ -1132,14 +1266,22 @@ def _select_iterable(self): @property def selectable(self): + # allows text() to be considered by + # _interpret_as_from return self _hide_froms = [] + # help in those cases where text() is + # interpreted in a column expression situation + key = _label = _resolve_label = None + + _allow_label_resolve = False + def __init__( - self, - text, - bind=None): + self, + text, + bind=None): self._bind = bind self._bindparams = {} @@ -1153,13 +1295,13 @@ def repl(m): @classmethod def _create_text(self, text, bind=None, bindparams=None, - typemap=None, autocommit=None): + typemap=None, autocommit=None): """Construct a new :class:`.TextClause` clause, representing a textual SQL string directly. E.g.:: - fom sqlalchemy import text + from sqlalchemy import text t = text("SELECT * FROM users") result = connection.execute(t) @@ -1187,10 +1329,10 @@ def _create_text(self, text, bind=None, bindparams=None, The :class:`.TextClause` construct includes methods which can provide information about the bound parameters as well as the column values which would be returned from the textual statement, assuming - it's an executable SELECT type of statement. 
The :meth:`.TextClause.bindparams` - method is used to provide bound parameter detail, and - :meth:`.TextClause.columns` method allows specification of - return columns including names and types:: + it's an executable SELECT type of statement. The + :meth:`.TextClause.bindparams` method is used to provide bound + parameter detail, and :meth:`.TextClause.columns` method allows + specification of return columns including names and types:: t = text("SELECT * FROM users WHERE id=:user_id").\\ bindparams(user_id=7).\\ @@ -1199,22 +1341,21 @@ def _create_text(self, text, bind=None, bindparams=None, for id, name in connection.execute(t): print(id, name) - The :func:`.text` construct is used internally in cases when - a literal string is specified for part of a larger query, such as - when a string is specified to the :meth:`.Select.where` method of - :class:`.Select`. In those cases, the same - bind parameter syntax is applied:: + The :func:`.text` construct is used in cases when + a literal string SQL fragment is specified as part of a larger query, + such as for the WHERE clause of a SELECT statement:: - s = select([users.c.id, users.c.name]).where("id=:user_id") + s = select([users.c.id, users.c.name]).where(text("id=:user_id")) result = connection.execute(s, user_id=12) - Using :func:`.text` explicitly usually implies the construction - of a full, standalone statement. As such, SQLAlchemy refers + :func:`.text` is also used for the construction + of a full, standalone statement using plain text. + As such, SQLAlchemy refers to it as an :class:`.Executable` object, and it supports the :meth:`Executable.execution_options` method. For example, a :func:`.text` construct that should be subject to "autocommit" - can be set explicitly so using the :paramref:`.Connection.execution_options.autocommit` - option:: + can be set explicitly so using the + :paramref:`.Connection.execution_options.autocommit` option:: t = text("EXEC my_procedural_thing()").\\ execution_options(autocommit=True) @@ -1259,9 +1400,10 @@ def _create_text(self, text, bind=None, bindparams=None, represented in the columns clause of a ``SELECT`` statement to type objects, which will be used to perform post-processing on columns within - the result set. This parameter now invokes the :meth:`.TextClause.columns` - method, which returns a :class:`.TextAsFrom` construct that gains - a ``.c`` collection and can be embedded in other expressions. E.g.:: + the result set. This parameter now invokes the + :meth:`.TextClause.columns` method, which returns a + :class:`.TextAsFrom` construct that gains a ``.c`` collection and + can be embedded in other expressions. E.g.:: stmt = text("SELECT * FROM table", typemap={'id': Integer, 'name': String}, @@ -1269,7 +1411,8 @@ def _create_text(self, text, bind=None, bindparams=None, Is equivalent to:: - stmt = text("SELECT * FROM table").columns(id=Integer, name=String) + stmt = text("SELECT * FROM table").columns(id=Integer, + name=String) Or alternatively:: @@ -1322,8 +1465,8 @@ def bindparams(self, *binds, **names_to_values): When specific typing behavior is needed, the positional ``*binds`` argument can be used in which to specify :func:`.bindparam` constructs - directly. These constructs must include at least the ``key`` argument, - then an optional value and type:: + directly. 
These constructs must include at least the ``key`` + argument, then an optional value and type:: from sqlalchemy import bindparam stmt = stmt.bindparams( @@ -1331,9 +1474,10 @@ def bindparams(self, *binds, **names_to_values): bindparam('timestamp', type_=DateTime) ) - Above, we specified the type of :class:`.DateTime` for the ``timestamp`` - bind, and the type of :class:`.String` for the ``name`` bind. In - the case of ``name`` we also set the default value of ``"jack"``. + Above, we specified the type of :class:`.DateTime` for the + ``timestamp`` bind, and the type of :class:`.String` for the ``name`` + bind. In the case of ``name`` we also set the default value of + ``"jack"``. Additional bound parameters can be supplied at statement execution time, e.g.:: @@ -1341,26 +1485,27 @@ def bindparams(self, *binds, **names_to_values): result = connection.execute(stmt, timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)) - The :meth:`.TextClause.bindparams` method can be called repeatedly, where - it will re-use existing :class:`.BindParameter` objects to add new information. - For example, we can call :meth:`.TextClause.bindparams` first with - typing information, and a second time with value information, and it - will be combined:: + The :meth:`.TextClause.bindparams` method can be called repeatedly, + where it will re-use existing :class:`.BindParameter` objects to add + new information. For example, we can call + :meth:`.TextClause.bindparams` first with typing information, and a + second time with value information, and it will be combined:: stmt = text("SELECT id, name FROM user WHERE name=:name " "AND timestamp=:timestamp") stmt = stmt.bindparams( - bindparam('name', type_=String), - bindparam('timestamp', type_=DateTime) - ) + bindparam('name', type_=String), + bindparam('timestamp', type_=DateTime) + ) stmt = stmt.bindparams( - name='jack', - timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5) - ) + name='jack', + timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5) + ) - .. versionadded:: 0.9.0 The :meth:`.TextClause.bindparams` method supersedes - the argument ``bindparams`` passed to :func:`~.expression.text`. + .. versionadded:: 0.9.0 The :meth:`.TextClause.bindparams` method + supersedes the argument ``bindparams`` passed to + :func:`~.expression.text`. """ @@ -1371,8 +1516,8 @@ def bindparams(self, *binds, **names_to_values): existing = new_params[bind.key] except KeyError: raise exc.ArgumentError( - "This text() construct doesn't define a " - "bound parameter named %r" % bind.key) + "This text() construct doesn't define a " + "bound parameter named %r" % bind.key) else: new_params[existing.key] = bind @@ -1381,13 +1526,11 @@ def bindparams(self, *binds, **names_to_values): existing = new_params[key] except KeyError: raise exc.ArgumentError( - "This text() construct doesn't define a " - "bound parameter named %r" % key) + "This text() construct doesn't define a " + "bound parameter named %r" % key) else: new_params[key] = existing._with_value(value) - - @util.dependencies('sqlalchemy.sql.selectable') def columns(self, selectable, *cols, **types): """Turn this :class:`.TextClause` object into a :class:`.TextAsFrom` @@ -1408,8 +1551,8 @@ def columns(self, selectable, *cols, **types): ).where(stmt.c.id > 5) Above, we used untyped :func:`.column` elements. 
These can also have - types specified, which will impact how the column behaves in expressions - as well as determining result set behavior:: + types specified, which will impact how the column behaves in + expressions as well as determining result set behavior:: stmt = text("SELECT id, name, timestamp FROM some_table") stmt = stmt.columns( @@ -1421,8 +1564,9 @@ def columns(self, selectable, *cols, **types): for id, name, timestamp in connection.execute(stmt): print(id, name, timestamp) - Keyword arguments allow just the names and types of columns to be specified, - where the :func:`.column` elements will be generated automatically:: + Keyword arguments allow just the names and types of columns to be + specified, where the :func:`.column` elements will be generated + automatically:: stmt = text("SELECT id, name, timestamp FROM some_table") stmt = stmt.columns( @@ -1435,24 +1579,24 @@ def columns(self, selectable, *cols, **types): print(id, name, timestamp) The :meth:`.TextClause.columns` method provides a direct - route to calling :meth:`.FromClause.alias` as well as :meth:`.SelectBase.cte` - against a textual SELECT statement:: + route to calling :meth:`.FromClause.alias` as well as + :meth:`.SelectBase.cte` against a textual SELECT statement:: stmt = stmt.columns(id=Integer, name=String).cte('st') stmt = select([sometable]).where(sometable.c.id == stmt.c.id) - .. versionadded:: 0.9.0 :func:`.text` can now be converted into a fully - featured "selectable" construct using the :meth:`.TextClause.columns` - method. This method supersedes the ``typemap`` argument to - :func:`.text`. + .. versionadded:: 0.9.0 :func:`.text` can now be converted into a + fully featured "selectable" construct using the + :meth:`.TextClause.columns` method. This method supersedes the + ``typemap`` argument to :func:`.text`. """ input_cols = [ ColumnClause(col.key, types.pop(col.key)) - if col.key in types - else col + if col.key in types + else col for col in cols ] + [ColumnClause(key, type_) for key, type_ in types.items()] return selectable.TextAsFrom(self, input_cols) @@ -1473,11 +1617,14 @@ def self_group(self, against=None): def _copy_internals(self, clone=_clone, **kw): self._bindparams = dict((b.key, clone(b, **kw)) - for b in self._bindparams.values()) + for b in self._bindparams.values()) def get_children(self, **kwargs): return list(self._bindparams.values()) + def compare(self, other): + return isinstance(other, TextClause) and other.text == self.text + class Null(ColumnElement): """Represent the NULL keyword in a SQL statement. @@ -1494,10 +1641,10 @@ def type(self): return type_api.NULLTYPE @classmethod - def _singleton(cls): + def _instance(cls): """Return a constant :class:`.Null` construct.""" - return NULL + return Null() def compare(self, other): return isinstance(other, Null) @@ -1518,11 +1665,11 @@ def type(self): return type_api.BOOLEANTYPE def _negate(self): - return TRUE + return True_() @classmethod - def _singleton(cls): - """Return a constant :class:`.False_` construct. + def _instance(cls): + """Return a :class:`.False_` construct. E.g.:: @@ -1556,11 +1703,12 @@ def _singleton(cls): """ - return FALSE + return False_() def compare(self, other): return isinstance(other, False_) + class True_(ColumnElement): """Represent the ``true`` keyword, or equivalent, in a SQL statement. 
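A minimal usage sketch for the :func:`.true`, :func:`.false` and :func:`.null`
constructs touched above, assuming a throwaway ``t`` table defined inline::

    from sqlalchemy import select, true, false, null
    from sqlalchemy.sql import table, column

    t = table("t", column("x"))

    # renders "WHERE false", or "WHERE 0 = 1" on backends without
    # boolean literals
    stmt = select([t.c.x]).where(false())

    # comparison against null() renders "x IS NULL"
    stmt = select([t.c.x]).where(t.c.x == null())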
@@ -1576,17 +1724,17 @@ def type(self): return type_api.BOOLEANTYPE def _negate(self): - return FALSE + return False_() @classmethod def _ifnone(cls, other): if other is None: - return cls._singleton() + return cls._instance() else: return other @classmethod - def _singleton(cls): + def _instance(cls): """Return a constant :class:`.True_` construct. E.g.:: @@ -1621,14 +1769,11 @@ def _singleton(cls): """ - return TRUE + return True_() def compare(self, other): return isinstance(other, True_) -NULL = Null() -FALSE = False_() -TRUE = True_() class ClauseList(ClauseElement): """Describe a list of clauses, separated by an operator. @@ -1642,13 +1787,16 @@ def __init__(self, *clauses, **kwargs): self.operator = kwargs.pop('operator', operators.comma_op) self.group = kwargs.pop('group', True) self.group_contents = kwargs.pop('group_contents', True) + text_converter = kwargs.pop( + '_literal_as_text', + _expression_literal_as_text) if self.group_contents: self.clauses = [ - _literal_as_text(clause).self_group(against=self.operator) + text_converter(clause).self_group(against=self.operator) for clause in clauses] else: self.clauses = [ - _literal_as_text(clause) + text_converter(clause) for clause in clauses] def __iter__(self): @@ -1663,7 +1811,7 @@ def _select_iterable(self): def append(self, clause): if self.group_contents: - self.clauses.append(_literal_as_text(clause).\ + self.clauses.append(_literal_as_text(clause). self_group(against=self.operator)) else: self.clauses.append(_literal_as_text(clause)) @@ -1702,21 +1850,23 @@ def compare(self, other, **kw): return False - class BooleanClauseList(ClauseList, ColumnElement): __visit_name__ = 'clauselist' def __init__(self, *arg, **kw): raise NotImplementedError( - "BooleanClauseList has a private constructor") + "BooleanClauseList has a private constructor") @classmethod def _construct(cls, operator, continue_on, skip_on, *clauses, **kw): convert_clauses = [] - clauses = util.coerce_generator_arg(clauses) + clauses = [ + _expression_literal_as_text(clause) + for clause in + util.coerce_generator_arg(clauses) + ] for clause in clauses: - clause = _literal_as_text(clause) if isinstance(clause, continue_on): continue @@ -1731,7 +1881,7 @@ def _construct(cls, operator, continue_on, skip_on, *clauses, **kw): return clauses[0].self_group(against=operators._asbool) convert_clauses = [c.self_group(against=operator) - for c in convert_clauses] + for c in convert_clauses] self = cls.__new__(cls) self.clauses = convert_clauses @@ -1831,6 +1981,7 @@ def _negate(self): and_ = BooleanClauseList.and_ or_ = BooleanClauseList.or_ + class Tuple(ClauseList, ColumnElement): """Represent a SQL tuple.""" @@ -1858,7 +2009,7 @@ def __init__(self, *clauses, **kw): clauses = [_literal_as_binds(c) for c in clauses] self._type_tuple = [arg.type for arg in clauses] self.type = kw.pop('type_', self._type_tuple[0] - if self._type_tuple else type_api.NULLTYPE) + if self._type_tuple else type_api.NULLTYPE) super(Tuple, self).__init__(*clauses, **kw) @@ -1869,7 +2020,7 @@ def _select_iterable(self): def _bind_param(self, operator, obj): return Tuple(*[ BindParameter(None, o, _compared_to_operator=operator, - _compared_to_type=type_, unique=True) + _compared_to_type=type_, unique=True) for o, type_ in zip(obj, self._type_tuple) ]).self_group() @@ -1911,7 +2062,7 @@ def __init__(self, whens, value=None, else_=None): languages. It returns an instance of :class:`.Case`. 
:func:`.case` in its usual form is passed a list of "when" - contructs, that is, a list of conditions and results as tuples:: + constructs, that is, a list of conditions and results as tuples:: from sqlalchemy import case @@ -1940,9 +2091,9 @@ def __init__(self, whens, value=None, else_=None): used via the :paramref:`.case.value` parameter, which is passed a column expression to be compared. In this form, the :paramref:`.case.whens` - parameter is passed as a dictionary containing expressions to be compared - against keyed to result expressions. The statement below is equivalent - to the preceding statement:: + parameter is passed as a dictionary containing expressions to be + compared against keyed to result expressions. The statement below is + equivalent to the preceding statement:: stmt = select([users_table]).\\ where( @@ -1987,23 +2138,24 @@ def __init__(self, whens, value=None, else_=None): ELSE 'lessthan10' END - :param whens: The criteria to be compared against, :paramref:`.case.whens` - accepts two different forms, based on whether or not :paramref:`.case.value` - is used. + :param whens: The criteria to be compared against, + :paramref:`.case.whens` accepts two different forms, based on + whether or not :paramref:`.case.value` is used. - In the first form, it accepts a list of 2-tuples; each 2-tuple consists - of ``(, )``, where the SQL expression is a - boolean expression and "value" is a resulting value, e.g.:: + In the first form, it accepts a list of 2-tuples; each 2-tuple + consists of ``(, )``, where the SQL + expression is a boolean expression and "value" is a resulting value, + e.g.:: case([ (users_table.c.name == 'wendy', 'W'), (users_table.c.name == 'jack', 'J') ]) - In the second form, it accepts a Python dictionary of comparison values - mapped to a resulting value; this form requires :paramref:`.case.value` - to be present, and values will be compared using the ``==`` operator, - e.g.:: + In the second form, it accepts a Python dictionary of comparison + values mapped to a resulting value; this form requires + :paramref:`.case.value` to be present, and values will be compared + using the ``==`` operator, e.g.:: case( {"wendy": "W", "jack": "J"}, @@ -2018,7 +2170,7 @@ def __init__(self, whens, value=None, else_=None): result of the ``CASE`` construct if all expressions within :paramref:`.case.whens` evaluate to false. When omitted, most databases will produce a result of NULL if none of the "when" - expressions evaulate to true. + expressions evaluate to true. 
""" @@ -2031,12 +2183,12 @@ def __init__(self, whens, value=None, else_=None): if value is not None: whenlist = [ (_literal_as_binds(c).self_group(), - _literal_as_binds(r)) for (c, r) in whens + _literal_as_binds(r)) for (c, r) in whens ] else: whenlist = [ (_no_literals(c).self_group(), - _literal_as_binds(r)) for (c, r) in whens + _literal_as_binds(r)) for (c, r) in whens ] if whenlist: @@ -2060,7 +2212,7 @@ def _copy_internals(self, clone=_clone, **kw): if self.value is not None: self.value = clone(self.value, **kw) self.whens = [(clone(x, **kw), clone(y, **kw)) - for x, y in self.whens] + for x, y in self.whens] if self.else_ is not None: self.else_ = clone(self.else_, **kw) @@ -2076,18 +2228,19 @@ def get_children(self, **kwargs): @property def _from_objects(self): return list(itertools.chain(*[x._from_objects for x in - self.get_children()])) + self.get_children()])) def literal_column(text, type_=None): - """Return a textual column expression, as would be in the columns - clause of a ``SELECT`` statement. - - The object returned supports further expressions in the same way as any - other column object, including comparison, math and string operations. - The type\_ parameter is important to determine proper expression behavior - (such as, '+' means string concatenation or numerical addition based on - the type). + """Produce a :class:`.ColumnClause` object that has the + :paramref:`.column.is_literal` flag set to True. + + :func:`.literal_column` is similar to :func:`.column`, except that + it is more often used as a "standalone" column expression that renders + exactly as stated; while :func:`.column` stores a string name that + will be assumed to be part of a table and may be quoted as such, + :func:`.literal_column` can be that, or any other arbitrary column-oriented + expression. :param text: the text of the expression; can be any SQL expression. Quoting rules will not be applied. To specify a column-name expression @@ -2099,11 +2252,18 @@ def literal_column(text, type_=None): provide result-set translation and additional expression semantics for this column. If left as None the type will be NullType. + .. seealso:: + + :func:`.column` + + :func:`.text` + + :ref:`sqlexpression_literal_column` + """ return ColumnClause(text, type_=type_, is_literal=True) - class Cast(ColumnElement): """Represent a ``CAST`` expression. @@ -2219,6 +2379,42 @@ def _from_objects(self): return self.expr._from_objects +class _label_reference(ColumnElement): + """Wrap a column expression as it appears in a 'reference' context. + + This expression is any that inclues an _order_by_label_element, + which is a Label, or a DESC / ASC construct wrapping a Label. + + The production of _label_reference() should occur when an expression + is added to this context; this includes the ORDER BY or GROUP BY of a + SELECT statement, as well as a few other places, such as the ORDER BY + within an OVER clause. + + """ + __visit_name__ = 'label_reference' + + def __init__(self, element): + self.element = element + + def _copy_internals(self, clone=_clone, **kw): + self.element = clone(self.element, **kw) + + @property + def _from_objects(self): + return () + + +class _textual_label_reference(ColumnElement): + __visit_name__ = 'textual_label_reference' + + def __init__(self, element): + self.element = element + + @util.memoized_property + def _text_clause(self): + return TextClause._create_text(self.element) + + class UnaryExpression(ColumnElement): """Define a 'unary' expression. 
@@ -2235,12 +2431,14 @@ class UnaryExpression(ColumnElement): __visit_name__ = 'unary' def __init__(self, element, operator=None, modifier=None, - type_=None, negate=None): + type_=None, negate=None, wraps_column_expression=False): self.operator = operator self.modifier = modifier - self.element = element.self_group(against=self.operator or self.modifier) + self.element = element.self_group( + against=self.operator or self.modifier) self.type = type_api.to_instance(type_) self.negate = negate + self.wraps_column_expression = wraps_column_expression @classmethod def _create_nullsfirst(cls, column): @@ -2261,11 +2459,13 @@ def _create_nullsfirst(cls, column): SELECT id, name FROM user ORDER BY name DESC NULLS FIRST Like :func:`.asc` and :func:`.desc`, :func:`.nullsfirst` is typically - invoked from the column expression itself using :meth:`.ColumnElement.nullsfirst`, - rather than as its standalone function version, as in:: + invoked from the column expression itself using + :meth:`.ColumnElement.nullsfirst`, rather than as its standalone + function version, as in:: - stmt = select([users_table]).\\ - order_by(users_table.c.name.desc().nullsfirst()) + stmt = (select([users_table]). + order_by(users_table.c.name.desc().nullsfirst()) + ) .. seealso:: @@ -2279,8 +2479,9 @@ def _create_nullsfirst(cls, column): """ return UnaryExpression( - _literal_as_text(column), modifier=operators.nullsfirst_op) - + _literal_as_label_reference(column), + modifier=operators.nullsfirst_op, + wraps_column_expression=False) @classmethod def _create_nullslast(cls, column): @@ -2301,8 +2502,9 @@ def _create_nullslast(cls, column): SELECT id, name FROM user ORDER BY name DESC NULLS LAST Like :func:`.asc` and :func:`.desc`, :func:`.nullslast` is typically - invoked from the column expression itself using :meth:`.ColumnElement.nullslast`, - rather than as its standalone function version, as in:: + invoked from the column expression itself using + :meth:`.ColumnElement.nullslast`, rather than as its standalone + function version, as in:: stmt = select([users_table]).\\ order_by(users_table.c.name.desc().nullslast()) @@ -2319,8 +2521,9 @@ def _create_nullslast(cls, column): """ return UnaryExpression( - _literal_as_text(column), modifier=operators.nullslast_op) - + _literal_as_label_reference(column), + modifier=operators.nullslast_op, + wraps_column_expression=False) @classmethod def _create_desc(cls, column): @@ -2358,7 +2561,9 @@ def _create_desc(cls, column): """ return UnaryExpression( - _literal_as_text(column), modifier=operators.desc_op) + _literal_as_label_reference(column), + modifier=operators.desc_op, + wraps_column_expression=False) @classmethod def _create_asc(cls, column): @@ -2395,7 +2600,9 @@ def _create_asc(cls, column): """ return UnaryExpression( - _literal_as_text(column), modifier=operators.asc_op) + _literal_as_label_reference(column), + modifier=operators.asc_op, + wraps_column_expression=False) @classmethod def _create_distinct(cls, expr): @@ -2434,10 +2641,11 @@ def _create_distinct(cls, expr): """ expr = _literal_as_binds(expr) - return UnaryExpression(expr, - operator=operators.distinct_op, type_=expr.type) + return UnaryExpression( + expr, operator=operators.distinct_op, + type_=expr.type, wraps_column_expression=False) - @util.memoized_property + @property def _order_by_label_element(self): if self.modifier in (operators.desc_op, operators.asc_op): return self.element._order_by_label_element @@ -2472,7 +2680,15 @@ def _negate(self): operator=self.negate, negate=self.operator, 
modifier=self.modifier, - type_=self.type) + type_=self.type, + wraps_column_expression=self.wraps_column_expression) + elif self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity: + return UnaryExpression( + self.self_group(against=operators.inv), + operator=operators.inv, + type_=type_api.BOOLEANTYPE, + wraps_column_expression=self.wraps_column_expression, + negate=None) else: return ClauseElement._negate(self) @@ -2491,11 +2707,15 @@ def __init__(self, element, operator, negate): self.operator = operator self.negate = negate self.modifier = None + self.wraps_column_expression = True def self_group(self, against=None): return self def _negate(self): + # TODO: this assumes the element is the True_() or False_() + # object, but this assumption isn't enforced and + # ColumnElement._negate() can send any number of expressions here return self.element._negate() @@ -2503,7 +2723,7 @@ class BinaryExpression(ColumnElement): """Represent an expression that is ``LEFT RIGHT``. A :class:`.BinaryExpression` is generated automatically - whenever two column expressions are used in a Python binary expresion:: + whenever two column expressions are used in a Python binary expression:: >>> from sqlalchemy.sql import column >>> column('a') + column('b') @@ -2516,7 +2736,7 @@ class BinaryExpression(ColumnElement): __visit_name__ = 'binary' def __init__(self, left, right, operator, type_=None, - negate=None, modifiers=None): + negate=None, modifiers=None): # allow compatibility with libraries that # refer to BinaryExpression directly and pass strings if isinstance(operator, util.string_types): @@ -2587,14 +2807,12 @@ def _negate(self): self.right, self.negate, negate=self.operator, - type_=type_api.BOOLEANTYPE, + type_=self.type, modifiers=self.modifiers) else: return super(BinaryExpression, self)._negate() - - class Grouping(ColumnElement): """Represent a grouping within a column expression""" @@ -2607,6 +2825,10 @@ def __init__(self, element): def self_group(self, against=None): return self + @property + def _key_label(self): + return self._label + @property def _label(self): return getattr(self.element, '_label', None) or self.anon_label @@ -2681,9 +2903,13 @@ def __init__(self, func, partition_by=None, order_by=None): """ self.func = func if order_by is not None: - self.order_by = ClauseList(*util.to_list(order_by)) + self.order_by = ClauseList( + *util.to_list(order_by), + _literal_as_text=_literal_as_label_reference) if partition_by is not None: - self.partition_by = ClauseList(*util.to_list(partition_by)) + self.partition_by = ClauseList( + *util.to_list(partition_by), + _literal_as_text=_literal_as_label_reference) @util.memoized_property def type(self): @@ -2706,7 +2932,121 @@ def _from_objects(self): return list(itertools.chain( *[c._from_objects for c in (self.func, self.partition_by, self.order_by) - if c is not None] + if c is not None] + )) + + +class FunctionFilter(ColumnElement): + """Represent a function FILTER clause. + + This is a special operator against aggregate and window functions, + which controls which rows are passed to it. + It's supported only by certain database backends. + + Invocation of :class:`.FunctionFilter` is via + :meth:`.FunctionElement.filter`:: + + func.count(1).filter(True) + + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.FunctionElement.filter` + + """ + __visit_name__ = 'funcfilter' + + criterion = None + + def __init__(self, func, *criterion): + """Produce a :class:`.FunctionFilter` object against a function. 
+ + Used against aggregate and window functions, + for database backends that support the "FILTER" clause. + + E.g.:: + + from sqlalchemy import funcfilter + funcfilter(func.count(1), MyClass.name == 'some name') + + Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')". + + This function is also available from the :data:`~.expression.func` + construct itself via the :meth:`.FunctionElement.filter` method. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.FunctionElement.filter` + + + """ + self.func = func + self.filter(*criterion) + + def filter(self, *criterion): + """Produce an additional FILTER against the function. + + This method adds additional criteria to the initial criteria + set up by :meth:`.FunctionElement.filter`. + + Multiple criteria are joined together at SQL render time + via ``AND``. + + + """ + + for criterion in list(criterion): + criterion = _expression_literal_as_text(criterion) + + if self.criterion is not None: + self.criterion = self.criterion & criterion + else: + self.criterion = criterion + + return self + + def over(self, partition_by=None, order_by=None): + """Produce an OVER clause against this filtered function. + + Used against aggregate or so-called "window" functions, + for database backends that support window functions. + + The expression:: + + func.rank().filter(MyClass.y > 5).over(order_by='x') + + is shorthand for:: + + from sqlalchemy import over, funcfilter + over(funcfilter(func.rank(), MyClass.y > 5), order_by='x') + + See :func:`~.expression.over` for a full description. + + """ + return Over(self, partition_by=partition_by, order_by=order_by) + + @util.memoized_property + def type(self): + return self.func.type + + def get_children(self, **kwargs): + return [c for c in + (self.func, self.criterion) + if c is not None] + + def _copy_internals(self, clone=_clone, **kw): + self.func = clone(self.func, **kw) + if self.criterion is not None: + self.criterion = clone(self.criterion, **kw) + + @property + def _from_objects(self): + return list(itertools.chain( + *[c._from_objects for c in (self.func, self.criterion) + if c is not None] )) @@ -2735,13 +3075,21 @@ def __init__(self, name, element, type_=None): :param obj: a :class:`.ColumnElement`. 
""" + + if isinstance(element, Label): + self._resolve_label = element._label + while isinstance(element, Label): element = element.element + if name: self.name = name + self._resolve_label = self.name else: - self.name = _anonymous_label('%%(%d %s)s' % (id(self), - getattr(element, 'name', 'anon'))) + self.name = _anonymous_label( + '%%(%d %s)s' % (id(self), getattr(element, 'name', 'anon')) + ) + self.key = self._label = self._key_label = self.name self._element = element self._type = type_ @@ -2751,14 +3099,18 @@ def __reduce__(self): return self.__class__, (self.name, self._element, self._type) @util.memoized_property + def _allow_label_resolve(self): + return self.element._allow_label_resolve + + @property def _order_by_label_element(self): return self @util.memoized_property def type(self): return type_api.to_instance( - self._type or getattr(self._element, 'type', None) - ) + self._type or getattr(self._element, 'type', None) + ) @util.memoized_property def element(self): @@ -2768,8 +3120,8 @@ def self_group(self, against=None): sub_element = self._element.self_group(against=against) if sub_element is not self._element: return Label(self.name, - sub_element, - type_=self._type) + sub_element, + type_=self._type) else: return self @@ -2784,8 +3136,16 @@ def foreign_keys(self): def get_children(self, **kwargs): return self.element, - def _copy_internals(self, clone=_clone, **kw): - self.element = clone(self.element, **kw) + def _copy_internals(self, clone=_clone, anonymize_labels=False, **kw): + self._element = clone(self._element, **kw) + self.__dict__.pop('element', None) + self.__dict__.pop('_allow_label_resolve', None) + if anonymize_labels: + self.name = self._resolve_label = _anonymous_label( + '%%(%d %s)s' % ( + id(self), getattr(self.element, 'name', 'anon')) + ) + self.key = self._label = self._key_label = self.name @property def _from_objects(self): @@ -2793,7 +3153,7 @@ def _from_objects(self): def _make_proxy(self, selectable, name=None, **kw): e = self.element._make_proxy(selectable, - name=name if name else self.name) + name=name if name else self.name) e._proxies.append(self) if self._type is not None: e.type = self._type @@ -2807,7 +3167,7 @@ class ColumnClause(Immutable, ColumnElement): :class:`.Column` class, is typically invoked using the :func:`.column` function, as in:: - from sqlalchemy.sql import column + from sqlalchemy import column id, name = column("id"), column("name") stmt = select([id, name]).select_from("user") @@ -2820,10 +3180,10 @@ class ColumnClause(Immutable, ColumnElement): :class:`.Column` object. While the :class:`.Column` class has all the same capabilities as :class:`.ColumnClause`, the :class:`.ColumnClause` class is usable by itself in those cases where behavioral requirements - are limited to simple SQL expression generation. The object has none of the - associations with schema-level metadata or with execution-time behavior - that :class:`.Column` does, so in that sense is a "lightweight" version - of :class:`.Column`. + are limited to simple SQL expression generation. The object has none of + the associations with schema-level metadata or with execution-time + behavior that :class:`.Column` does, so in that sense is a "lightweight" + version of :class:`.Column`. Full details on :class:`.ColumnClause` usage is at :func:`.column`. @@ -2847,7 +3207,7 @@ def __init__(self, text, type_=None, is_literal=False, _selectable=None): :class:`.Column` class. 
The :func:`.column` function can be invoked with just a name alone, as in:: - from sqlalchemy.sql import column + from sqlalchemy import column id, name = column("id"), column("name") stmt = select([id, name]).select_from("user") @@ -2856,8 +3216,8 @@ def __init__(self, text, type_=None, is_literal=False, _selectable=None): SELECT id, name FROM user - Once constructed, :func:`.column` may be used like any other SQL expression - element such as within :func:`.select` constructs:: + Once constructed, :func:`.column` may be used like any other SQL + expression element such as within :func:`.select` constructs:: from sqlalchemy.sql import column @@ -2879,7 +3239,7 @@ def __init__(self, text, type_=None, is_literal=False, _selectable=None): (which is the lightweight analogue to :class:`.Table`) to produce a working table construct with minimal boilerplate:: - from sqlalchemy.sql import table, column + from sqlalchemy import table, column, select user = table("user", column("id"), @@ -2891,8 +3251,13 @@ def __init__(self, text, type_=None, is_literal=False, _selectable=None): A :func:`.column` / :func:`.table` construct like that illustrated above can be created in an - ad-hoc fashion and is not associated with any :class:`.schema.MetaData`, - DDL, or events, unlike its :class:`.Table` counterpart. + ad-hoc fashion and is not associated with any + :class:`.schema.MetaData`, DDL, or events, unlike its + :class:`.Table` counterpart. + + .. versionchanged:: 1.0.0 :func:`.expression.column` can now + be imported from the plain ``sqlalchemy`` namespace like any + other SQL element. :param text: the text of the element. @@ -2902,8 +3267,8 @@ def __init__(self, text, type_=None, is_literal=False, _selectable=None): :param is_literal: if True, the :class:`.ColumnClause` is assumed to be an exact expression that will be delivered to the output with no quoting rules applied regardless of case sensitive settings. the - :func:`.literal_column()` function essentially invokes :func:`.column` - while passing ``is_literal=True``. + :func:`.literal_column()` function essentially invokes + :func:`.column` while passing ``is_literal=True``. .. 
seealso:: @@ -2911,9 +3276,11 @@ def __init__(self, text, type_=None, is_literal=False, _selectable=None): :func:`.literal_column` + :func:`.table` + :func:`.text` - :ref:`metadata_toplevel` + :ref:`sqlexpression_literal_column` """ @@ -2924,13 +3291,13 @@ def __init__(self, text, type_=None, is_literal=False, _selectable=None): def _compare_name_for_result(self, other): if self.is_literal or \ - self.table is None or self.table._textual or \ - not hasattr(other, 'proxy_set') or ( - isinstance(other, ColumnClause) and - (other.is_literal or - other.table is None or - other.table._textual) - ): + self.table is None or self.table._textual or \ + not hasattr(other, 'proxy_set') or ( + isinstance(other, ColumnClause) and + (other.is_literal or + other.table is None or + other.table._textual) + ): return (hasattr(other, 'name') and self.name == other.name) or \ (hasattr(other, '_label') and self._label == other._label) else: @@ -2970,6 +3337,10 @@ def _key_label(self): def _label(self): return self._gen_label(self.name) + @_memoized_property + def _render_label_in_columns_clause(self): + return self.table is not None + def _gen_label(self, name): t = self.table @@ -2979,7 +3350,7 @@ def _gen_label(self, name): elif t is not None and t.named_with_column: if getattr(t, 'schema', None): label = t.schema.replace('.', '_') + "_" + \ - t.name + "_" + name + t.name + "_" + name else: label = t.name + "_" + name @@ -3011,24 +3382,24 @@ def _gen_label(self, name): return name def _bind_param(self, operator, obj): - return BindParameter(self.name, obj, - _compared_to_operator=operator, - _compared_to_type=self.type, - unique=True) + return BindParameter(self.key, obj, + _compared_to_operator=operator, + _compared_to_type=self.type, + unique=True) def _make_proxy(self, selectable, name=None, attach=True, - name_is_truncatable=False, **kw): + name_is_truncatable=False, **kw): # propagate the "is_literal" flag only if we are keeping our name, # otherwise its considered to be a label is_literal = self.is_literal and (name is None or name == self.name) c = self._constructor( - _as_truncated(name or self.name) if \ - name_is_truncatable else \ - (name or self.name), - type_=self.type, - _selectable=selectable, - is_literal=is_literal - ) + _as_truncated(name or self.name) if + name_is_truncatable else + (name or self.name), + type_=self.type, + _selectable=selectable, + is_literal=is_literal + ) if name is None: c.key = self.key c._proxies = [self] @@ -3063,7 +3434,7 @@ class ReleaseSavepointClause(_IdentifiedClause): __visit_name__ = 'release_savepoint' -class quoted_name(util.text_type): +class quoted_name(util.MemoizedSlots, util.text_type): """Represent a SQL identifier combined with quoting preferences. :class:`.quoted_name` is a Python unicode/str subclass which @@ -3087,11 +3458,11 @@ class quoted_name(util.text_type): such a backend. The :class:`.quoted_name` object is normally created automatically - when specifying the name for key schema constructs such as :class:`.Table`, - :class:`.Column`, and others. The class can also be passed explicitly - as the name to any function that receives a name which can be quoted. - Such as to use the :meth:`.Engine.has_table` method with an unconditionally - quoted name:: + when specifying the name for key schema constructs such as + :class:`.Table`, :class:`.Column`, and others. The class can also be + passed explicitly as the name to any function that receives a name which + can be quoted. 
Such as to use the :meth:`.Engine.has_table` method with + an unconditionally quoted name:: from sqlaclchemy import create_engine from sqlalchemy.sql.elements import quoted_name @@ -3107,6 +3478,8 @@ class quoted_name(util.text_type): """ + __slots__ = 'quote', 'lower', 'upper' + def __new__(cls, value, quote): if value is None: return None @@ -3116,8 +3489,8 @@ def __new__(cls, value, quote): # elif not sprcls and quote is None: # return value elif isinstance(value, cls) and ( - quote is None or value.quote == quote - ): + quote is None or value.quote == quote + ): return value self = super(quoted_name, cls).__new__(cls, value) self.quote = quote @@ -3126,15 +3499,13 @@ def __new__(cls, value, quote): def __reduce__(self): return quoted_name, (util.text_type(self), self.quote) - @util.memoized_instancemethod - def lower(self): + def _memoized_method_lower(self): if self.quote: return self else: return util.text_type(self).lower() - @util.memoized_instancemethod - def upper(self): + def _memoized_method_upper(self): if self.quote: return self else: @@ -3146,13 +3517,16 @@ def __repr__(self): backslashed = backslashed.decode('ascii') return "'%s'" % backslashed + class _truncated_label(quoted_name): """A unicode subclass used to identify symbolic " "names that may require truncation.""" + __slots__ = () + def __new__(cls, value, quote=None): quote = getattr(value, "quote", quote) - #return super(_truncated_label, cls).__new__(cls, value, quote, True) + # return super(_truncated_label, cls).__new__(cls, value, quote, True) return super(_truncated_label, cls).__new__(cls, value, quote) def __reduce__(self): @@ -3161,6 +3535,79 @@ def __reduce__(self): def apply_map(self, map_): return self + +class conv(_truncated_label): + """Mark a string indicating that a name has already been converted + by a naming convention. + + This is a string subclass that indicates a name that should not be + subject to any further naming conventions. + + E.g. when we create a :class:`.Constraint` using a naming convention + as follows:: + + m = MetaData(naming_convention={ + "ck": "ck_%(table_name)s_%(constraint_name)s" + }) + t = Table('t', m, Column('x', Integer), + CheckConstraint('x > 5', name='x5')) + + The name of the above constraint will be rendered as ``"ck_t_x5"``. + That is, the existing name ``x5`` is used in the naming convention as the + ``constraint_name`` token. + + In some situations, such as in migration scripts, we may be rendering + the above :class:`.CheckConstraint` with a name that's already been + converted. In order to make sure the name isn't double-modified, the + new name is applied using the :func:`.schema.conv` marker. We can + use this explicitly as follows:: + + + m = MetaData(naming_convention={ + "ck": "ck_%(table_name)s_%(constraint_name)s" + }) + t = Table('t', m, Column('x', Integer), + CheckConstraint('x > 5', name=conv('ck_t_x5'))) + + Where above, the :func:`.schema.conv` marker indicates that the constraint + name here is final, and the name will render as ``"ck_t_x5"`` and not + ``"ck_t_ck_t_x5"`` + + .. versionadded:: 0.9.4 + + .. seealso:: + + :ref:`constraint_naming_conventions` + + """ + __slots__ = () + + +class _defer_name(_truncated_label): + """mark a name as 'deferred' for the purposes of automated name + generation. 
+ + """ + __slots__ = () + + def __new__(cls, value): + if value is None: + return _NONE_NAME + elif isinstance(value, conv): + return value + else: + return super(_defer_name, cls).__new__(cls, value) + + def __reduce__(self): + return self.__class__, (util.text_type(self), ) + + +class _defer_none_name(_defer_name): + """indicate a 'deferred' name that was ultimately the value None.""" + __slots__ = () + +_NONE_NAME = _defer_none_name("_unnamed_") + # for backwards compatibility in case # someone is re-implementing the # _truncated_identifier() sequence in a custom @@ -3172,19 +3619,21 @@ class _anonymous_label(_truncated_label): """A unicode subclass used to identify anonymously generated names.""" + __slots__ = () + def __add__(self, other): return _anonymous_label( - quoted_name( - util.text_type.__add__(self, util.text_type(other)), - self.quote) - ) + quoted_name( + util.text_type.__add__(self, util.text_type(other)), + self.quote) + ) def __radd__(self, other): return _anonymous_label( - quoted_name( - util.text_type.__add__(util.text_type(other), self), - self.quote) - ) + quoted_name( + util.text_type.__add__(util.text_type(other), self), + self.quote) + ) def apply_map(self, map_): if self.quote is not None: @@ -3215,7 +3664,7 @@ def _string_or_unprintable(element): else: try: return str(element) - except: + except Exception: return "unprintable element %r" % element @@ -3246,14 +3695,17 @@ def _cloned_intersection(a, b): return set(elem for elem in a if all_overlap.intersection(elem._cloned_set)) + def _cloned_difference(a, b): all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b)) return set(elem for elem in a - if not all_overlap.intersection(elem._cloned_set)) + if not all_overlap.intersection(elem._cloned_set)) -def _labeled(element): - if not hasattr(element, 'name'): +@util.dependencies("sqlalchemy.sql.functions") +def _labeled(functions, element): + if not hasattr(element, 'name') or \ + isinstance(element, functions.FunctionElement): return element.label(None) else: return element @@ -3279,7 +3731,7 @@ def _find_columns(clause): # however the inspect() versions add significant callcount # overhead for critical functions like _interpret_as_column_or_from(). # Generally, the column-based functions are more performance critical -# and are fine just checking for __clause_element__(). it's only +# and are fine just checking for __clause_element__(). It is only # _interpret_as_from() where we'd like to be able to receive ORM entities # that have no defined namespace, hence inspect() is needed there. 
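A compact sketch of the naming-convention markers described above, with
invented table and constraint names; an explicit name is expanded by the
convention, while a name wrapped in :func:`.conv` is taken as final::

    from sqlalchemy import MetaData, Table, Column, Integer, CheckConstraint
    from sqlalchemy.schema import conv

    m = MetaData(naming_convention={
        "ck": "ck_%(table_name)s_%(constraint_name)s"
    })

    # named "ck_t_positive" via the convention
    t = Table('t', m, Column('x', Integer),
              CheckConstraint('x > 0', name='positive'))

    # conv() marks the name as already converted; it stays "ck_t2_custom"
    t2 = Table('t2', m, Column('x', Integer),
               CheckConstraint('x > 0', name=conv('ck_t2_custom')))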
@@ -3302,18 +3754,53 @@ def _clause_element_as_expr(element): return element -def _literal_as_text(element): +def _literal_as_label_reference(element): + if isinstance(element, util.string_types): + return _textual_label_reference(element) + + elif hasattr(element, '__clause_element__'): + element = element.__clause_element__() + + return _literal_as_text(element) + + +def _literal_and_labels_as_label_reference(element): + if isinstance(element, util.string_types): + return _textual_label_reference(element) + + elif hasattr(element, '__clause_element__'): + element = element.__clause_element__() + + if isinstance(element, ColumnElement) and \ + element._order_by_label_element is not None: + return _label_reference(element) + else: + return _literal_as_text(element) + + +def _expression_literal_as_text(element): + return _literal_as_text(element, warn=True) + + +def _literal_as_text(element, warn=False): if isinstance(element, Visitable): return element elif hasattr(element, '__clause_element__'): return element.__clause_element__() elif isinstance(element, util.string_types): + if warn: + util.warn_limited( + "Textual SQL expression %(expr)r should be " + "explicitly declared as text(%(expr)r)", + {"expr": util.ellipses_string(element)}) + return TextClause(util.text_type(element)) elif isinstance(element, (util.NoneType, bool)): return _const_expr(element) else: raise exc.ArgumentError( - "SQL expression object or string expected." + "SQL expression object or string expected, got object of type %r " + "instead" % type(element) ) @@ -3331,7 +3818,7 @@ def _no_literals(element): def _is_literal(element): return not isinstance(element, Visitable) and \ - not hasattr(element, '__clause_element__') + not hasattr(element, '__clause_element__') def _only_column_elements_or_none(element, name): @@ -3346,10 +3833,11 @@ def _only_column_elements(element, name): element = element.__clause_element__() if not isinstance(element, ColumnElement): raise exc.ArgumentError( - "Column-based expression object expected for argument " - "'%s'; got: '%s', type %s" % (name, element, type(element))) + "Column-based expression object expected for argument " + "'%s'; got: '%s', type %s" % (name, element, type(element))) return element + def _literal_as_binds(element, name=None, type_=None): if hasattr(element, '__clause_element__'): return element.__clause_element__() @@ -3361,6 +3849,8 @@ def _literal_as_binds(element, name=None, type_=None): else: return element +_guess_straight_column = re.compile(r'^\w\S*$', re.I) + def _interpret_as_column_or_from(element): if isinstance(element, Visitable): @@ -3375,7 +3865,31 @@ def _interpret_as_column_or_from(element): elif hasattr(insp, "selectable"): return insp.selectable - return ColumnClause(str(element), is_literal=True) + # be forgiving as this is an extremely common + # and known expression + if element == "*": + guess_is_literal = True + elif isinstance(element, (numbers.Number)): + return ColumnClause(str(element), is_literal=True) + else: + element = str(element) + # give into temptation, as this fact we are guessing about + # is not one we've previously ever needed our users tell us; + # but let them know we are not happy about it + guess_is_literal = not _guess_straight_column.match(element) + util.warn_limited( + "Textual column expression %(column)r should be " + "explicitly declared with text(%(column)r), " + "or use %(literal_column)s(%(column)r) " + "for more specificity", + { + "column": util.ellipses_string(element), + "literal_column": "literal_column" 
+ if guess_is_literal else "column" + }) + return ColumnClause( + element, + is_literal=guess_is_literal) def _const_expr(element): @@ -3402,18 +3916,18 @@ def _type_from_args(args): def _corresponding_column_or_error(fromclause, column, - require_embedded=False): + require_embedded=False): c = fromclause.corresponding_column(column, - require_embedded=require_embedded) + require_embedded=require_embedded) if c is None: raise exc.InvalidRequestError( - "Given column '%s', attached to table '%s', " - "failed to locate a corresponding column from table '%s'" - % - (column, - getattr(column, 'table', None), - fromclause.description) - ) + "Given column '%s', attached to table '%s', " + "failed to locate a corresponding column from table '%s'" + % + (column, + getattr(column, 'table', None), + fromclause.description) + ) return c @@ -3449,3 +3963,6 @@ def key(self): def info(self): return self._Annotated__element.info + @util.memoized_property + def anon_label(self): + return self._Annotated__element.anon_label diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py index c99665b426..832779cae4 100644 --- a/lib/sqlalchemy/sql/expression.py +++ b/lib/sqlalchemy/sql/expression.py @@ -1,5 +1,6 @@ # sql/expression.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -27,26 +28,26 @@ from .visitors import Visitable -from .functions import func, modifier, FunctionElement +from .functions import func, modifier, FunctionElement, Function from ..util.langhelpers import public_factory from .elements import ClauseElement, ColumnElement,\ - BindParameter, UnaryExpression, BooleanClauseList, \ - Label, Cast, Case, ColumnClause, TextClause, Over, Null, \ - True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \ - Grouping, not_, \ - collate, literal_column, between,\ - literal, outparam, type_coerce, ClauseList + BindParameter, UnaryExpression, BooleanClauseList, \ + Label, Cast, Case, ColumnClause, TextClause, Over, Null, \ + True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \ + Grouping, not_, \ + collate, literal_column, between,\ + literal, outparam, type_coerce, ClauseList, FunctionFilter from .elements import SavepointClause, RollbackToSavepointClause, \ - ReleaseSavepointClause + ReleaseSavepointClause from .base import ColumnCollection, Generative, Executable, \ - PARSE_AUTOCOMMIT + PARSE_AUTOCOMMIT from .selectable import Alias, Join, Select, Selectable, TableClause, \ - CompoundSelect, CTE, FromClause, FromGrouping, SelectBase, \ - alias, GenerativeSelect, \ - subquery, HasPrefixes, Exists, ScalarSelect, TextAsFrom + CompoundSelect, CTE, FromClause, FromGrouping, SelectBase, \ + alias, GenerativeSelect, \ + subquery, HasPrefixes, HasSuffixes, Exists, ScalarSelect, TextAsFrom from .dml import Insert, Update, Delete, UpdateBase, ValuesBase @@ -70,39 +71,48 @@ extract = public_factory(Extract, ".expression.extract") tuple_ = public_factory(Tuple, ".expression.tuple_") except_ = public_factory(CompoundSelect._create_except, ".expression.except_") -except_all = public_factory(CompoundSelect._create_except_all, ".expression.except_all") -intersect = public_factory(CompoundSelect._create_intersect, ".expression.intersect") -intersect_all = public_factory(CompoundSelect._create_intersect_all, ".expression.intersect_all") +except_all = 
public_factory( + CompoundSelect._create_except_all, ".expression.except_all") +intersect = public_factory( + CompoundSelect._create_intersect, ".expression.intersect") +intersect_all = public_factory( + CompoundSelect._create_intersect_all, ".expression.intersect_all") union = public_factory(CompoundSelect._create_union, ".expression.union") -union_all = public_factory(CompoundSelect._create_union_all, ".expression.union_all") +union_all = public_factory( + CompoundSelect._create_union_all, ".expression.union_all") exists = public_factory(Exists, ".expression.exists") -nullsfirst = public_factory(UnaryExpression._create_nullsfirst, ".expression.nullsfirst") -nullslast = public_factory(UnaryExpression._create_nullslast, ".expression.nullslast") +nullsfirst = public_factory( + UnaryExpression._create_nullsfirst, ".expression.nullsfirst") +nullslast = public_factory( + UnaryExpression._create_nullslast, ".expression.nullslast") asc = public_factory(UnaryExpression._create_asc, ".expression.asc") desc = public_factory(UnaryExpression._create_desc, ".expression.desc") -distinct = public_factory(UnaryExpression._create_distinct, ".expression.distinct") -true = public_factory(True_._singleton, ".expression.true") -false = public_factory(False_._singleton, ".expression.false") -null = public_factory(Null._singleton, ".expression.null") +distinct = public_factory( + UnaryExpression._create_distinct, ".expression.distinct") +true = public_factory(True_._instance, ".expression.true") +false = public_factory(False_._instance, ".expression.false") +null = public_factory(Null._instance, ".expression.null") join = public_factory(Join._create_join, ".expression.join") outerjoin = public_factory(Join._create_outerjoin, ".expression.outerjoin") insert = public_factory(Insert, ".expression.insert") update = public_factory(Update, ".expression.update") delete = public_factory(Delete, ".expression.delete") +funcfilter = public_factory( + FunctionFilter, ".expression.funcfilter") # internal functions still being called from tests and the ORM, # these might be better off in some other namespace from .base import _from_objects from .elements import _literal_as_text, _clause_element_as_expr,\ - _is_column, _labeled, _only_column_elements, _string_or_unprintable, \ + _is_column, _labeled, _only_column_elements, _string_or_unprintable, \ _truncated_label, _clone, _cloned_difference, _cloned_intersection,\ _column_as_key, _literal_as_binds, _select_iterables, \ - _corresponding_column_or_error + _corresponding_column_or_error, _literal_as_label_reference, \ + _expression_literal_as_text from .selectable import _interpret_as_from - # old names for compatibility _Executable = Executable _BindParamClause = BindParameter diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index a9b88b13b9..1ae8bea251 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -1,5 +1,6 @@ # sql/functions.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,11 +9,11 @@ """ from . 
import sqltypes, schema -from .base import Executable +from .base import Executable, ColumnCollection from .elements import ClauseList, Cast, Extract, _literal_as_binds, \ - literal_column, _type_from_args, ColumnElement, _clone,\ - Over, BindParameter -from .selectable import FromClause, Select + literal_column, _type_from_args, ColumnElement, _clone,\ + Over, BindParameter, FunctionFilter +from .selectable import FromClause, Select, Alias from . import operators from .visitors import VisitableType @@ -57,21 +58,33 @@ def __init__(self, *clauses, **kwargs): """ args = [_literal_as_binds(c, self.name) for c in clauses] self.clause_expr = ClauseList( - operator=operators.comma_op, - group_contents=True, *args).\ - self_group() + operator=operators.comma_op, + group_contents=True, *args).\ + self_group() def _execute_on_connection(self, connection, multiparams, params): return connection._execute_function(self, multiparams, params) @property def columns(self): - """Fulfill the 'columns' contract of :class:`.ColumnElement`. + """The set of columns exported by this :class:`.FunctionElement`. + + Function objects currently have no result column names built in; + this method returns a single-element column collection with + an anonymously named column. + + An interim approach to providing named columns for a function + as a FROM clause is to build a :func:`.select` with the + desired columns:: + + from sqlalchemy.sql import column + + stmt = select([column('x'), column('y')]).\ + select_from(func.myfunction()) - Returns a single-element list consisting of this object. """ - return [self] + return ColumnCollection(self.label(None)) @util.memoized_property def clauses(self): @@ -103,6 +116,35 @@ def over(self, partition_by=None, order_by=None): """ return Over(self, partition_by=partition_by, order_by=order_by) + def filter(self, *criterion): + """Produce a FILTER clause against this function. + + Used against aggregate and window functions, + for database backends that support the "FILTER" clause. + + The expression:: + + func.count(1).filter(True) + + is shorthand for:: + + from sqlalchemy import funcfilter + funcfilter(func.count(1), True) + + .. versionadded:: 1.0.0 + + .. seealso:: + + :class:`.FunctionFilter` + + :func:`.funcfilter` + + + """ + if not criterion: + return self + return FunctionFilter(self, *criterion) + @property def _from_objects(self): return self.clauses._from_objects @@ -115,6 +157,38 @@ def _copy_internals(self, clone=_clone, **kw): self._reset_exported() FunctionElement.clauses._reset(self) + def alias(self, name=None, flat=False): + """Produce a :class:`.Alias` construct against this + :class:`.FunctionElement`. + + This construct wraps the function in a named alias which + is suitable for the FROM clause, in the style accepted for example + by Postgresql. + + e.g.:: + + from sqlalchemy.sql import column + + stmt = select([column('data_view')]).\\ + select_from(SomeTable).\\ + select_from(func.unnest(SomeTable.data).alias('data_view') + ) + + Would produce: + + .. sourcecode:: sql + + SELECT data_view + FROM sometable, unnest(sometable.data) AS data_view + + .. versionadded:: 0.9.8 The :meth:`.FunctionElement.alias` method + is now supported. Previously, this method's behavior was + undefined and did not behave consistently across versions. + + """ + + return Alias(self, name) + def select(self): """Produce a :func:`~.expression.select` construct against this :class:`.FunctionElement`. 
@@ -159,7 +233,7 @@ def execute(self): def _bind_param(self, operator, obj): return BindParameter(None, obj, _compared_to_operator=operator, - _compared_to_type=self.type, unique=True) + _compared_to_type=self.type, unique=True) class _FunctionGenerator(object): @@ -211,13 +285,13 @@ def __call__(self, *c, **kwargs): :data:`.func` is a special object instance which generates SQL functions based on name-based attributes, e.g.:: - >>> print func.count(1) + >>> print(func.count(1)) count(:param_1) The element is a column-oriented SQL element like any other, and is used in that way:: - >>> print select([func.count(table.c.id)]) + >>> print(select([func.count(table.c.id)])) SELECT count(sometable.id) FROM sometable Any name can be given to :data:`.func`. If the function name is unknown to @@ -225,13 +299,13 @@ def __call__(self, *c, **kwargs): which SQLAlchemy is aware of, the name may be interpreted as a *generic function* which will be compiled appropriately to the target database:: - >>> print func.current_timestamp() + >>> print(func.current_timestamp()) CURRENT_TIMESTAMP To call functions which are present in dot-separated packages, specify them in the same manner:: - >>> print func.stats.yield_curve(5, 10) + >>> print(func.stats.yield_curve(5, 10)) stats.yield_curve(:yield_curve_1, :yield_curve_2) SQLAlchemy can be made aware of the return type of functions to enable @@ -240,8 +314,8 @@ def __call__(self, *c, **kwargs): treated as a string in expressions, specify :class:`~sqlalchemy.types.Unicode` as the type: - >>> print func.my_string(u'hi', type_=Unicode) + ' ' + \ - ... func.my_string(u'there', type_=Unicode) + >>> print(func.my_string(u'hi', type_=Unicode) + ' ' + + ... func.my_string(u'there', type_=Unicode)) my_string(:my_string_1) || :my_string_2 || my_string(:my_string_3) The object returned by a :data:`.func` call is usually an instance of @@ -251,7 +325,7 @@ def __call__(self, *c, **kwargs): method of a :class:`.Connection` or :class:`.Engine`, where it will be wrapped inside of a SELECT statement first:: - print connection.execute(func.current_timestamp()).scalar() + print(connection.execute(func.current_timestamp()).scalar()) In a few exception cases, the :data:`.func` accessor will redirect a name to a built-in expression such as :func:`.cast` @@ -267,10 +341,21 @@ def __call__(self, *c, **kwargs): calculate their return type automatically. For a listing of known generic functions, see :ref:`generic_functions`. + .. note:: + + The :data:`.func` construct has only limited support for calling + standalone "stored procedures", especially those with special + parameterization concerns. + + See the section :ref:`stored_procedures` for details on how to use + the DBAPI-level ``callproc()`` method for fully traditional stored + procedures. + """ modifier = _FunctionGenerator(group=False) + class Function(FunctionElement): """Describe a named SQL function. 
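A brief sketch combining :data:`.func` with the new
:meth:`.FunctionElement.filter` described above, against a hypothetical
``orders`` table::

    from sqlalchemy import func, select
    from sqlalchemy.sql import table, column

    orders = table("orders", column("amount"), column("status"))

    # COUNT(orders.amount) FILTER (WHERE orders.status = 'complete'),
    # on backends that support the FILTER clause
    stmt = select([
        func.count(orders.c.amount).filter(orders.c.status == 'complete')
    ])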
@@ -305,9 +390,10 @@ def __init__(self, name, *clauses, **kw): def _bind_param(self, operator, obj): return BindParameter(self.name, obj, - _compared_to_operator=operator, - _compared_to_type=self.type, - unique=True) + _compared_to_operator=operator, + _compared_to_type=self.type, + unique=True) + class _GenericMeta(VisitableType): def __init__(cls, clsname, bases, clsdict): @@ -403,8 +489,8 @@ def __init__(self, *args, **kwargs): self.packagenames = [] self._bind = kwargs.get('bind', None) self.clause_expr = ClauseList( - operator=operators.comma_op, - group_contents=True, *parsed_args).self_group() + operator=operators.comma_op, + group_contents=True, *parsed_args).self_group() self.type = sqltypes.to_instance( kwargs.pop("type_", None) or getattr(self, 'type', None)) @@ -414,7 +500,7 @@ def __init__(self, *args, **kwargs): class next_value(GenericFunction): """Represent the 'next value', given a :class:`.Sequence` - as it's single argument. + as its single argument. Compiles into the appropriate function on each backend, or will raise NotImplementedError if used on a backend @@ -426,7 +512,7 @@ class next_value(GenericFunction): def __init__(self, seq, **kw): assert isinstance(seq, schema.Sequence), \ - "next_value() accepts a Sequence object as input." + "next_value() accepts a Sequence object as input." self._bind = kw.get('bind', None) self.sequence = seq diff --git a/lib/sqlalchemy/sql/naming.py b/lib/sqlalchemy/sql/naming.py index 1c5fae1933..2a1a832a42 100644 --- a/lib/sqlalchemy/sql/naming.py +++ b/lib/sqlalchemy/sql/naming.py @@ -1,5 +1,6 @@ # sqlalchemy/naming.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -10,54 +11,15 @@ """ from .schema import Constraint, ForeignKeyConstraint, PrimaryKeyConstraint, \ - UniqueConstraint, CheckConstraint, Index, Table, Column + UniqueConstraint, CheckConstraint, Index, Table, Column from .. import event, events from .. import exc -from .elements import _truncated_label +from .elements import _truncated_label, _defer_name, _defer_none_name, conv import re -class conv(_truncated_label): - """Mark a string indicating that a name has already been converted - by a naming convention. - - This is a string subclass that indicates a name that should not be - subject to any further naming conventions. - - E.g. when we create a :class:`.Constraint` using a naming convention - as follows:: - - m = MetaData(naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}) - t = Table('t', m, Column('x', Integer), - CheckConstraint('x > 5', name='x5')) - - The name of the above constraint will be rendered as ``"ck_t_x5"``. That is, - the existing name ``x5`` is used in the naming convention as the ``constraint_name`` - token. - - In some situations, such as in migration scripts, we may be rendering - the above :class:`.CheckConstraint` with a name that's already been - converted. In order to make sure the name isn't double-modified, the - new name is applied using the :func:`.schema.conv` marker. 
We can - use this explicitly as follows:: - - - m = MetaData(naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}) - t = Table('t', m, Column('x', Integer), - CheckConstraint('x > 5', name=conv('ck_t_x5'))) - - Where above, the :func:`.schema.conv` marker indicates that the constraint - name here is final, and the name will render as ``"ck_t_x5"`` and not - ``"ck_t_ck_t_x5"`` - - .. versionadded:: 0.9.4 - - .. seealso:: - - :ref:`constraint_naming_conventions` - - """ class ConventionDict(object): + def __init__(self, const, table, convention): self.const = const self._is_fk = isinstance(const, ForeignKeyConstraint) @@ -76,12 +38,12 @@ def _column_X(self, idx): return list(self.const.columns)[idx] def _key_constraint_name(self): - if not self._const_name: + if isinstance(self._const_name, (type(None), _defer_none_name)): raise exc.InvalidRequestError( - "Naming convention including " - "%(constraint_name)s token requires that " - "constraint is explicitly named." - ) + "Naming convention including " + "%(constraint_name)s token requires that " + "constraint is explicitly named." + ) if not isinstance(self._const_name, conv): self.const.name = None return self._const_name @@ -133,6 +95,7 @@ def __getitem__(self, key): ForeignKeyConstraint: "fk" } + def _get_convention(dict_, key): for super_ in key.__mro__: @@ -144,6 +107,26 @@ def _get_convention(dict_, key): return None +def _constraint_name_for_table(const, table): + metadata = table.metadata + convention = _get_convention(metadata.naming_convention, type(const)) + + if isinstance(const.name, conv): + return const.name + elif convention is not None and \ + not isinstance(const.name, conv) and \ + ( + const.name is None or + "constraint_name" in convention or + isinstance(const.name, _defer_name)): + return conv( + convention % ConventionDict(const, table, + metadata.naming_convention) + ) + elif isinstance(convention, _defer_none_name): + return None + + @event.listens_for(Constraint, "after_parent_attach") @event.listens_for(Index, "after_parent_attach") def _constraint_name(const, table): @@ -152,14 +135,12 @@ def _constraint_name(const, table): # to link the column attached to the table as this constraint # associated with the table. 
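As a quick illustration of the naming-convention behavior that the removed ``conv`` docstring describes (the class itself now lives alongside the other elements and is re-exported as ``sqlalchemy.schema.conv``), using the table and constraint names from that docstring::

    from sqlalchemy import MetaData, Table, Column, Integer, CheckConstraint
    from sqlalchemy.schema import conv

    metadata = MetaData(naming_convention={
        "ck": "ck_%(table_name)s_%(constraint_name)s"})

    # plain name: the convention is applied, rendering "ck_t_x5"
    t = Table('t', metadata,
              Column('x', Integer),
              CheckConstraint('x > 5', name='x5'))

    # conv() marks the name as already converted, so it renders as-is
    t2 = Table('t2', metadata,
               Column('x', Integer),
               CheckConstraint('x > 5', name=conv('ck_t2_x5')))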
event.listen(table, "after_parent_attach", - lambda col, table: _constraint_name(const, table) - ) + lambda col, table: _constraint_name(const, table) + ) elif isinstance(table, Table): - metadata = table.metadata - convention = _get_convention(metadata.naming_convention, type(const)) - if convention is not None: - newname = conv( - convention % ConventionDict(const, table, metadata.naming_convention) - ) - if const.name is None: - const.name = newname + if isinstance(const.name, (conv, _defer_name)): + return + + newname = _constraint_name_for_table(const, table) + if newname is not None: + const.name = newname diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index 91301c78cf..5e2900d8c2 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -1,5 +1,6 @@ # sql/operators.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -14,8 +15,8 @@ from operator import ( and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg, - getitem, lshift, rshift - ) + getitem, lshift, rshift, contains +) if util.py2k: from operator import div @@ -23,11 +24,11 @@ div = truediv - class Operators(object): """Base of comparison and logical operators. - Implements base methods :meth:`~sqlalchemy.sql.operators.Operators.operate` and + Implements base methods + :meth:`~sqlalchemy.sql.operators.Operators.operate` and :meth:`~sqlalchemy.sql.operators.Operators.reverse_operate`, as well as :meth:`~sqlalchemy.sql.operators.Operators.__and__`, :meth:`~sqlalchemy.sql.operators.Operators.__or__`, @@ -37,6 +38,8 @@ class Operators(object): :class:`.ColumnOperators`. """ + __slots__ = () + def __and__(self, other): """Implement the ``&`` operator. @@ -135,13 +138,13 @@ def op(self, opstring, precedence=0, is_comparison=False): .. versionadded:: 0.8 - added the 'precedence' argument. :param is_comparison: if True, the operator will be considered as a - "comparison" operator, that is which evaulates to a boolean true/false - value, like ``==``, ``>``, etc. This flag should be set so that - ORM relationships can establish that the operator is a comparison - operator when used in a custom join condition. + "comparison" operator, that is which evaluates to a boolean + true/false value, like ``==``, ``>``, etc. This flag should be set + so that ORM relationships can establish that the operator is a + comparison operator when used in a custom join condition. - .. versionadded:: 0.9.2 - added the :paramref:`.Operators.op.is_comparison` - flag. + .. versionadded:: 0.9.2 - added the + :paramref:`.Operators.op.is_comparison` flag. .. seealso:: @@ -265,6 +268,8 @@ def eq(a, b): """ + __slots__ = () + timetuple = None """Hack, allows datetime objects to be compared on the LHS.""" @@ -328,6 +333,9 @@ def __neg__(self): """ return self.operate(neg) + def __contains__(self, other): + return self.operate(contains, other) + def __getitem__(self, index): """Implement the [] operator. @@ -421,8 +429,8 @@ def in_(self, other): def notin_(self, other): """implement the ``NOT IN`` operator. - This is equivalent to using negation with :meth:`.ColumnOperators.in_`, - i.e. ``~x.in_(y)``. + This is equivalent to using negation with + :meth:`.ColumnOperators.in_`, i.e. ``~x.in_(y)``. .. 
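The ``Operators.op()`` behavior documented above, including the ``is_comparison`` flag, can be sketched roughly as follows; the ``&`` and ``~`` operators and the table are arbitrary examples::

    from sqlalchemy import MetaData, Table, Column, Integer, select

    metadata = MetaData()
    t = Table('t', metadata, Column('flags', Integer))

    # a custom binary operator; op() returns a callable taking the right operand
    bitwise_and = t.c.flags.op('&', precedence=6)
    print(select([bitwise_and(0x04)]))
    # roughly: SELECT t.flags & :flags_1 AS anon_1 FROM t

    # mark an operator as a comparison so the ORM can use it in join conditions
    regex_match = t.c.flags.op('~', is_comparison=True)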
versionadded:: 0.8 @@ -518,11 +526,19 @@ def contains(self, other, **kwargs): return self.operate(contains_op, other, **kwargs) def match(self, other, **kwargs): - """Implements the 'match' operator. + """Implements a database-specific 'match' operator. - In a column context, this produces a MATCH clause, i.e. - ``MATCH ''``. The allowed contents of ``other`` - are database backend specific. + :meth:`~.ColumnOperators.match` attempts to resolve to + a MATCH-like function or operator provided by the backend. + Examples include: + + * Postgresql - renders ``x @@ to_tsquery(y)`` + * MySQL - renders ``MATCH (x) AGAINST (y IN BOOLEAN MODE)`` + * Oracle - renders ``CONTAINS(x, y)`` + * other backends may provide special implementations. + * Backends without any special implementation will emit + the operator as "MATCH". This is compatible with SQlite, for + example. """ return self.operate(match_op, other, **kwargs) @@ -584,10 +600,20 @@ def __rdiv__(self, other): """ return self.reverse_operate(div, other) - def between(self, cleft, cright): + def __rmod__(self, other): + """Implement the ``%`` operator in reverse. + + See :meth:`.ColumnOperators.__mod__`. + + """ + return self.reverse_operate(mod, other) + + def between(self, cleft, cright, symmetric=False): """Produce a :func:`~.expression.between` clause against - the parent object, given the lower and upper range.""" - return self.operate(between_op, cleft, cright) + the parent object, given the lower and upper range. + + """ + return self.operate(between_op, cleft, cright, symmetric=symmetric) def distinct(self): """Produce a :func:`~.expression.distinct` clause against the @@ -672,9 +698,11 @@ def exists(): def istrue(a): raise NotImplementedError() + def isfalse(a): raise NotImplementedError() + def is_(a, b): return a.is_(b) @@ -707,8 +735,12 @@ def notilike_op(a, b, escape=None): return a.notilike(b, escape=escape) -def between_op(a, b, c): - return a.between(b, c) +def between_op(a, b, c, symmetric=False): + return a.between(b, c, symmetric=symmetric) + + +def notbetween_op(a, b, c, symmetric=False): + return a.notbetween(b, c, symmetric=symmetric) def in_op(a, b): @@ -747,8 +779,12 @@ def notcontains_op(a, b, escape=None): return ~a.contains(b, escape=escape) -def match_op(a, b): - return a.match(b) +def match_op(a, b, **kw): + return a.match(b, **kw) + + +def notmatch_op(a, b, **kw): + return a.notmatch(b, **kw) def comma_op(a, b): @@ -777,7 +813,7 @@ def nullslast_op(a): _commutative = set([eq, ne, add, mul]) -_comparison = set([eq, ne, lt, gt, ge, le, between_op]) +_comparison = set([eq, ne, lt, gt, ge, le, between_op, like_op]) def is_comparison(op): @@ -791,7 +827,7 @@ def is_commutative(op): def is_ordering_modifier(op): return op in (asc_op, desc_op, - nullsfirst_op, nullslast_op) + nullsfirst_op, nullslast_op) _associative = _commutative.union([concat_op, and_, or_]) @@ -818,6 +854,7 @@ def is_ordering_modifier(op): concat_op: 6, match_op: 6, + notmatch_op: 6, ilike_op: 6, notilike_op: 6, @@ -837,6 +874,7 @@ def is_ordering_modifier(op): le: 5, between_op: 5, + notbetween_op: 5, distinct_op: 5, inv: 5, istrue: 5, @@ -862,6 +900,6 @@ def is_precedent(operator, against): return False else: return (_PRECEDENCE.get(operator, - getattr(operator, 'precedence', _smallest)) <= - _PRECEDENCE.get(against, - getattr(against, 'precedence', _largest))) + getattr(operator, 'precedence', _smallest)) <= + _PRECEDENCE.get(against, + getattr(against, 'precedence', _largest))) diff --git a/lib/sqlalchemy/sql/schema.py 
b/lib/sqlalchemy/sql/schema.py index e29fe456f1..c122ee4e8c 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -1,5 +1,6 @@ # sql/schema.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -36,8 +37,8 @@ from . import type_api from .base import _bind_or_error, ColumnCollection from .elements import ClauseElement, ColumnClause, _truncated_label, \ - _as_truncated, TextClause, _literal_as_text,\ - ColumnElement, _find_columns, quoted_name + _as_truncated, TextClause, _literal_as_text,\ + ColumnElement, _find_columns, quoted_name from .selectable import TableClause import collections import sqlalchemy @@ -54,7 +55,6 @@ def _get_table_key(name, schema): return schema + "." + name - @inspection._self_inspects class SchemaItem(SchemaEventTarget, visitors.Visitable): """Base class for items that define a database schema.""" @@ -76,7 +76,7 @@ def get_children(self, **kwargs): return [] def __repr__(self): - return util.generic_repr(self) + return util.generic_repr(self, omit_kwarg=['info']) @property @util.deprecated('0.9', 'Use ``.name.quote``') @@ -161,46 +161,90 @@ class Table(DialectKWArgs, SchemaItem, TableClause): :class:`.SchemaItem` constructs may be added here, including :class:`.PrimaryKeyConstraint`, and :class:`.ForeignKeyConstraint`. - :param autoload: Defaults to False: the Columns for this table should - be reflected from the database. Usually there will be no Column - objects in the constructor if this property is set. + :param autoload: Defaults to False, unless :paramref:`.Table.autoload_with` + is set in which case it defaults to True; :class:`.Column` objects + for this table should be reflected from the database, possibly + augmenting or replacing existing :class:`.Column` objects that were + expicitly specified. + + .. versionchanged:: 1.0.0 setting the :paramref:`.Table.autoload_with` + parameter implies that :paramref:`.Table.autoload` will default + to True. + + .. seealso:: + + :ref:`metadata_reflection_toplevel` - :param autoload_replace: If ``True``, when using ``autoload=True`` - and ``extend_existing=True``, - replace ``Column`` objects already present in the ``Table`` that's - in the ``MetaData`` registry with - what's reflected. Otherwise, all existing columns will be - excluded from the reflection process. Note that this does - not impact ``Column`` objects specified in the same call to ``Table`` - which includes ``autoload``, those always take precedence. - Defaults to ``True``. + :param autoload_replace: Defaults to ``True``; when using + :paramref:`.Table.autoload` + in conjunction with :paramref:`.Table.extend_existing`, indicates + that :class:`.Column` objects present in the already-existing + :class:`.Table` object should be replaced with columns of the same + name retrieved from the autoload process. When ``False``, columns + already present under existing names will be omitted from the + reflection process. + + Note that this setting does not impact :class:`.Column` objects + specified programmatically within the call to :class:`.Table` that + also is autoloading; those :class:`.Column` objects will always + replace existing columns of the same name when + :paramref:`.Table.extend_existing` is ``True``. .. 
versionadded:: 0.7.5 - :param autoload_with: If autoload==True, this is an optional Engine - or Connection instance to be used for the table reflection. If - ``None``, the underlying MetaData's bound connectable will be used. + .. seealso:: + + :paramref:`.Table.autoload` + + :paramref:`.Table.extend_existing` + + :param autoload_with: An :class:`.Engine` or :class:`.Connection` object + with which this :class:`.Table` object will be reflected; when + set to a non-None value, it implies that :paramref:`.Table.autoload` + is ``True``. If left unset, but :paramref:`.Table.autoload` is + explicitly set to ``True``, an autoload operation will attempt to + proceed by locating an :class:`.Engine` or :class:`.Connection` bound + to the underlying :class:`.MetaData` object. + + .. seealso:: + + :paramref:`.Table.autoload` :param extend_existing: When ``True``, indicates that if this :class:`.Table` is already present in the given :class:`.MetaData`, apply further arguments within the constructor to the existing :class:`.Table`. - If ``extend_existing`` or ``keep_existing`` are not set, an error is - raised if additional table modifiers are specified when - the given :class:`.Table` is already present in the :class:`.MetaData`. - - .. versionchanged:: 0.7.4 - ``extend_existing`` will work in conjunction - with ``autoload=True`` to run a new reflection operation against - the database; new :class:`.Column` objects will be produced - from database metadata to replace those existing with the same - name, and additional :class:`.Column` objects not present - in the :class:`.Table` will be added. - - As is always the case with ``autoload=True``, :class:`.Column` - objects can be specified in the same :class:`.Table` constructor, - which will take precedence. I.e.:: + If :paramref:`.Table.extend_existing` or + :paramref:`.Table.keep_existing` are not set, and the given name + of the new :class:`.Table` refers to a :class:`.Table` that is + already present in the target :class:`.MetaData` collection, and + this :class:`.Table` specifies additional columns or other constructs + or flags that modify the table's state, an + error is raised. The purpose of these two mutually-exclusive flags + is to specify what action should be taken when a :class:`.Table` + is specified that matches an existing :class:`.Table`, yet specifies + additional constructs. + + :paramref:`.Table.extend_existing` will also work in conjunction + with :paramref:`.Table.autoload` to run a new reflection + operation against the database, even if a :class:`.Table` + of the same name is already present in the target + :class:`.MetaData`; newly reflected :class:`.Column` objects + and other options will be added into the state of the + :class:`.Table`, potentially overwriting existing columns + and options of the same name. + + .. versionchanged:: 0.7.4 :paramref:`.Table.extend_existing` will + invoke a new reflection operation when combined with + :paramref:`.Table.autoload` set to True. + + As is always the case with :paramref:`.Table.autoload`, + :class:`.Column` objects can be specified in the same :class:`.Table` + constructor, which will take precedence. 
Below, the existing + table ``mytable`` will be augmented with :class:`.Column` objects + both reflected from the database, as well as the given :class:`.Column` + named "y":: Table("mytable", metadata, Column('y', Integer), @@ -209,10 +253,14 @@ class Table(DialectKWArgs, SchemaItem, TableClause): autoload_with=engine ) - The above will overwrite all columns within ``mytable`` which - are present in the database, except for ``y`` which will be used as is - from the above definition. If the ``autoload_replace`` flag - is set to False, no existing columns will be replaced. + .. seealso:: + + :paramref:`.Table.autoload` + + :paramref:`.Table.autoload_replace` + + :paramref:`.Table.keep_existing` + :param implicit_returning: True by default - indicates that RETURNING can be used by default to fetch newly inserted primary key @@ -236,11 +284,22 @@ class Table(DialectKWArgs, SchemaItem, TableClause): to define a new :class:`.Table` on first call, but on subsequent calls will return the same :class:`.Table`, without any of the declarations (particularly constraints) - being applied a second time. Also see extend_existing. + being applied a second time. + + If :paramref:`.Table.extend_existing` or + :paramref:`.Table.keep_existing` are not set, and the given name + of the new :class:`.Table` refers to a :class:`.Table` that is + already present in the target :class:`.MetaData` collection, and + this :class:`.Table` specifies additional columns or other constructs + or flags that modify the table's state, an + error is raised. The purpose of these two mutually-exclusive flags + is to specify what action should be taken when a :class:`.Table` + is specified that matches an existing :class:`.Table`, yet specifies + additional constructs. + + .. seealso:: - If extend_existing or keep_existing are not set, an error is - raised if additional table modifiers are specified when - the given :class:`.Table` is already present in the :class:`.MetaData`. + :paramref:`.Table.extend_existing` :param listeners: A list of tuples of the form ``(, )`` which will be passed to :func:`.event.listen` upon construction. @@ -286,10 +345,10 @@ def listen_for_reflect(table, column_info): ``name`` parameter, in that quoting is applied for reserved words or case-sensitive names; to enable unconditional quoting for the schema name, specify the flag - ``quote_schema=True`` to the constructor, or use the :class:`.quoted_name` - construct to specify the name. + ``quote_schema=True`` to the constructor, or use the + :class:`.quoted_name` construct to specify the name. - :param useexisting: Deprecated. Use extend_existing. + :param useexisting: Deprecated. Use :paramref:`.Table.extend_existing`. :param \**kw: Additional keyword arguments not mentioned above are dialect specific, and passed in the form ``_``. @@ -353,9 +412,8 @@ def __new__(cls, *args, **kw): table.dispatch.after_parent_attach(table, metadata) return table except: - metadata._remove_table(name, schema) - raise - + with util.safe_reraise(): + metadata._remove_table(name, schema) @property @util.deprecated('0.9', 'Use ``table.schema.quote``') @@ -378,7 +436,8 @@ def __init__(self, *args, **kw): # calling the superclass constructor. 
def _init(self, name, metadata, *args, **kwargs): - super(Table, self).__init__(quoted_name(name, kwargs.pop('quote', None))) + super(Table, self).__init__( + quoted_name(name, kwargs.pop('quote', None))) self.metadata = metadata self.schema = kwargs.pop('schema', None) @@ -399,8 +458,8 @@ def _init(self, name, metadata, *args, **kwargs): else: self.fullname = self.name - autoload = kwargs.pop('autoload', False) autoload_with = kwargs.pop('autoload_with', None) + autoload = kwargs.pop('autoload', autoload_with is not None) # this argument is only used with _init_existing() kwargs.pop('autoload_replace', True) include_columns = kwargs.pop('include_columns', None) @@ -437,16 +496,17 @@ def _autoload(self, metadata, autoload_with, include_columns, self, include_columns, exclude_columns ) else: - bind = _bind_or_error(metadata, - msg="No engine is bound to this Table's MetaData. " - "Pass an engine to the Table via " - "autoload_with=, " - "or associate the MetaData with an engine via " - "metadata.bind=") + bind = _bind_or_error( + metadata, + msg="No engine is bound to this Table's MetaData. " + "Pass an engine to the Table via " + "autoload_with=, " + "or associate the MetaData with an engine via " + "metadata.bind=") bind.run_callable( - bind.dialect.reflecttable, - self, include_columns, exclude_columns - ) + bind.dialect.reflecttable, + self, include_columns, exclude_columns + ) @property def _sorted_constraints(self): @@ -456,9 +516,22 @@ def _sorted_constraints(self): """ return sorted(self.constraints, key=lambda c: c._creation_order) + @property + def foreign_key_constraints(self): + """:class:`.ForeignKeyConstraint` objects referred to by this + :class:`.Table`. + + This list is produced from the collection of :class:`.ForeignKey` + objects currently associated. + + .. versionadded:: 1.0.0 + + """ + return set(fkc.constraint for fkc in self.foreign_keys) + def _init_existing(self, *args, **kwargs): - autoload = kwargs.pop('autoload', False) autoload_with = kwargs.pop('autoload_with', None) + autoload = kwargs.pop('autoload', autoload_with is not None) autoload_replace = kwargs.pop('autoload_replace', True) schema = kwargs.pop('schema', None) if schema and schema != self.schema: @@ -487,7 +560,8 @@ def _init_existing(self, *args, **kwargs): else: exclude_columns = () self._autoload( - self.metadata, autoload_with, include_columns, exclude_columns) + self.metadata, autoload_with, + include_columns, exclude_columns) self._extra_kwargs(**kwargs) self._init_items(*args) @@ -501,12 +575,14 @@ def _init_collections(self): @util.memoized_property def _autoincrement_column(self): for col in self.primary_key: - if col.autoincrement and \ - col.type._type_affinity is not None and \ - issubclass(col.type._type_affinity, type_api.INTEGERTYPE._type_affinity) and \ - (not col.foreign_keys or col.autoincrement == 'ignore_fk') and \ - isinstance(col.default, (type(None), Sequence)) and \ - (col.server_default is None or col.server_default.reflected): + if (col.autoincrement and col.type._type_affinity is not None and + issubclass(col.type._type_affinity, + type_api.INTEGERTYPE._type_affinity) and + (not col.foreign_keys or + col.autoincrement == 'ignore_fk') and + isinstance(col.default, (type(None), Sequence)) and + (col.server_default is None or + col.server_default.reflected)): return col @property @@ -515,8 +591,9 @@ def key(self): This value is used as the dictionary key within the :attr:`.MetaData.tables` collection. 
It is typically the same - as that of :attr:`.Table.name` for a table with no :attr:`.Table.schema` - set; otherwise it is typically of the form ``schemaname.tablename``. + as that of :attr:`.Table.name` for a table with no + :attr:`.Table.schema` set; otherwise it is typically of the form + ``schemaname.tablename``. """ return _get_table_key(self.name, self.schema) @@ -611,7 +688,7 @@ def _set_parent(self, metadata): self.metadata = metadata def get_children(self, column_collections=True, - schema_visitor=False, **kw): + schema_visitor=False, **kw): if not schema_visitor: return TableClause.get_children( self, column_collections=column_collections, **kw) @@ -628,7 +705,7 @@ def exists(self, bind=None): bind = _bind_or_error(self) return bind.run_callable(bind.dialect.has_table, - self.name, schema=self.schema) + self.name, schema=self.schema) def create(self, bind=None, checkfirst=False): """Issue a ``CREATE`` statement for this @@ -644,8 +721,8 @@ def create(self, bind=None, checkfirst=False): if bind is None: bind = _bind_or_error(self) bind._run_visitor(ddl.SchemaGenerator, - self, - checkfirst=checkfirst) + self, + checkfirst=checkfirst) def drop(self, bind=None, checkfirst=False): """Issue a ``DROP`` statement for this @@ -660,10 +737,11 @@ def drop(self, bind=None, checkfirst=False): if bind is None: bind = _bind_or_error(self) bind._run_visitor(ddl.SchemaDropper, - self, - checkfirst=checkfirst) + self, + checkfirst=checkfirst) - def tometadata(self, metadata, schema=RETAIN_SCHEMA, referred_schema_fn=None): + def tometadata(self, metadata, schema=RETAIN_SCHEMA, + referred_schema_fn=None, name=None): """Return a copy of this :class:`.Table` associated with a different :class:`.MetaData`. @@ -702,9 +780,10 @@ def tometadata(self, metadata, schema=RETAIN_SCHEMA, referred_schema_fn=None): in order to provide for the schema name that should be assigned to the referenced table of a :class:`.ForeignKeyConstraint`. The callable accepts this parent :class:`.Table`, the - target schema that we are changing to, the :class:`.ForeignKeyConstraint` - object, and the existing "target schema" of that constraint. The - function should return the string schema name that should be applied. + target schema that we are changing to, the + :class:`.ForeignKeyConstraint` object, and the existing + "target schema" of that constraint. The function should return the + string schema name that should be applied. E.g.:: def referred_schema_fn(table, to_schema, @@ -719,13 +798,21 @@ def referred_schema_fn(table, to_schema, .. versionadded:: 0.9.2 - """ + :param name: optional string name indicating the target table name. + If not specified or None, the table name is retained. This allows + a :class:`.Table` to be copied to the same :class:`.MetaData` target + with a new name. + + .. versionadded:: 1.0.0 + """ + if name is None: + name = self.name if schema is RETAIN_SCHEMA: schema = self.schema elif schema is None: schema = metadata.schema - key = _get_table_key(self.name, schema) + key = _get_table_key(name, schema) if key in metadata.tables: util.warn("Table '%s' already exists within the given " "MetaData - not copying." 
% self.description) @@ -735,20 +822,24 @@ def referred_schema_fn(table, to_schema, for c in self.columns: args.append(c.copy(schema=schema)) table = Table( - self.name, metadata, schema=schema, + name, metadata, schema=schema, *args, **self.kwargs - ) + ) for c in self.constraints: if isinstance(c, ForeignKeyConstraint): referred_schema = c._referred_schema if referred_schema_fn: - fk_constraint_schema = referred_schema_fn(self, schema, c, referred_schema) + fk_constraint_schema = referred_schema_fn( + self, schema, c, referred_schema) else: - fk_constraint_schema = schema if referred_schema == self.schema else None - table.append_constraint(c.copy(schema=fk_constraint_schema, target_table=table)) - - else: - table.append_constraint(c.copy(schema=schema, target_table=table)) + fk_constraint_schema = ( + schema if referred_schema == self.schema else None) + table.append_constraint( + c.copy(schema=fk_constraint_schema, target_table=table)) + + elif not c._type_bound: + table.append_constraint( + c.copy(schema=schema, target_table=table)) for index in self.indexes: # skip indexes that would be generated # by the 'index' flag on Column @@ -801,13 +892,13 @@ def __init__(self, *args, **kwargs): The ``type`` argument may be the second positional argument or specified by keyword. - If the ``type`` is ``None`` or is omitted, it will first default to the special - type :class:`.NullType`. If and when this :class:`.Column` is - made to refer to another column using :class:`.ForeignKey` - and/or :class:`.ForeignKeyConstraint`, the type of the remote-referenced - column will be copied to this column as well, at the moment that - the foreign key is resolved against that remote :class:`.Column` - object. + If the ``type`` is ``None`` or is omitted, it will first default to + the special type :class:`.NullType`. If and when this + :class:`.Column` is made to refer to another column using + :class:`.ForeignKey` and/or :class:`.ForeignKeyConstraint`, the type + of the remote-referenced column will be copied to this column as + well, at the moment that the foreign key is resolved against that + remote :class:`.Column` object. .. versionchanged:: 0.9.0 Support for propagation of type to a :class:`.Column` from its @@ -840,10 +931,17 @@ def __init__(self, *args, **kwargs): * Part of the primary key - * Are not referenced by any foreign keys, unless - the value is specified as ``'ignore_fk'`` + * Not refering to another column via :class:`.ForeignKey`, unless + the value is specified as ``'ignore_fk'``:: - .. versionadded:: 0.7.4 + # turn on autoincrement for this column despite + # the ForeignKey() + Column('id', ForeignKey('other.id'), + primary_key=True, autoincrement='ignore_fk') + + It is typically not desirable to have "autoincrement" enabled + on such a column as its value intends to mirror that of a + primary key column elsewhere. * have no server side or client side defaults (with the exception of Postgresql SERIAL). @@ -857,8 +955,11 @@ def __init__(self, *args, **kwargs): SERIAL on Postgresql, and IDENTITY on MS-SQL. It does *not* issue AUTOINCREMENT for SQLite since this is a special SQLite flag that is not required for autoincrementing - behavior. See the SQLite dialect documentation for - information on SQLite's AUTOINCREMENT. + behavior. + + .. 
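A short sketch of the :meth:`.Table.tometadata` options covered above, including the new ``name`` argument; the table names are hypothetical::

    from sqlalchemy import MetaData, Table, Column, Integer

    metadata = MetaData()
    users = Table('users', metadata, Column('id', Integer, primary_key=True))

    # copy to a different MetaData, keeping the name
    other_metadata = MetaData()
    users_copy = users.tometadata(other_metadata)

    # 1.0.0: copy into the same MetaData under a new name
    users_archive = users.tometadata(metadata, name='users_archive')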
seealso:: + + :ref:`sqlite_autoincrement` * The column will be considered to be available as cursor.lastrowid or equivalent, for those dialects which @@ -883,8 +984,12 @@ def __init__(self, *args, **kwargs): a positional argument; see that class for full detail on the structure of the argument. - Contrast this argument to ``server_default`` which creates a - default generator on the database side. + Contrast this argument to :paramref:`.Column.server_default` + which creates a default generator on the database side. + + .. seealso:: + + :ref:`metadata_defaults_toplevel` :param doc: optional String that can be used by the ORM or similar to document attributes. This attribute does not render SQL @@ -941,8 +1046,8 @@ def __init__(self, *args, **kwargs): y DATETIME DEFAULT NOW() - Strings and text() will be converted into a :class:`.DefaultClause` - object upon initialization. + Strings and text() will be converted into a + :class:`.DefaultClause` object upon initialization. Use :class:`.FetchedValue` to indicate that an already-existing column will generate a default value on the database side which @@ -950,6 +1055,10 @@ def __init__(self, *args, **kwargs): construct does not specify any DDL and the implementation is left to the database, such as via a trigger. + .. seealso:: + + :ref:`server_defaults` + :param server_onupdate: A :class:`.FetchedValue` instance representing a database-side default generation function. This indicates to SQLAlchemy that a newly generated value will be @@ -977,12 +1086,12 @@ def __init__(self, *args, **kwargs): database, and should not be included in the columns list for a ``CREATE TABLE`` statement. - For more elaborate scenarios where columns should be conditionally - rendered differently on different backends, consider custom - compilation rules for :class:`.CreateColumn`. + For more elaborate scenarios where columns should be + conditionally rendered differently on different backends, + consider custom compilation rules for :class:`.CreateColumn`. - ..versionadded:: 0.8.3 Added the ``system=True`` parameter to - :class:`.Column`. + .. versionadded:: 0.8.3 Added the ``system=True`` parameter to + :class:`.Column`. """ @@ -1008,7 +1117,7 @@ def __init__(self, *args, **kwargs): name = quoted_name(name, kwargs.pop('quote', None)) elif "quote" in kwargs: raise exc.ArgumentError("Explicit 'name' is required when " - "sending 'quote' argument") + "sending 'quote' argument") super(Column, self).__init__(name, type_) self.key = kwargs.pop('key', name) @@ -1044,8 +1153,12 @@ def __init__(self, *args, **kwargs): else: if getattr(self.type, '_warn_on_bytestring', False): if isinstance(self.default, util.binary_type): - util.warn("Unicode column received non-unicode " - "default value.") + util.warn( + "Unicode column '%s' has non-unicode " + "default value %r specified." 
% ( + self.key, + self.default + )) args.append(ColumnDefault(self.default)) if self.server_default is not None: @@ -1065,7 +1178,7 @@ def __init__(self, *args, **kwargs): args.append(self.server_onupdate._as_for_update(True)) else: args.append(DefaultClause(self.server_onupdate, - for_update=True)) + for_update=True)) self._init_items(*args) util.set_creation_order(self) @@ -1124,7 +1237,7 @@ def __repr__(self): [repr(x) for x in self.foreign_keys if x is not None] + [repr(x) for x in self.constraints] + [(self.table is not None and "table=<%s>" % - self.table.description or "table=None")] + + self.table.description or "table=None")] + ["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg]) def _set_parent(self, table): @@ -1138,8 +1251,10 @@ def _set_parent(self, table): existing = getattr(self, 'table', None) if existing is not None and existing is not table: raise exc.ArgumentError( - "Column object already assigned to Table '%s'" % - existing.description) + "Column object '%s' already assigned to Table '%s'" % ( + self.key, + existing.description + )) if self.key in table._columns: col = table._columns.get(self.key) @@ -1161,7 +1276,7 @@ def _set_parent(self, table): raise exc.ArgumentError( "Trying to redefine primary-key column '%s' as a " "non-primary-key column on table '%s'" % ( - self.key, table.fullname)) + self.key, table.fullname)) self.table = table if self.index: @@ -1181,15 +1296,24 @@ def _set_parent(self, table): "Index object external to the Table.") table.append_constraint(UniqueConstraint(self.key)) - fk_key = (table.key, self.key) - if fk_key in self.table.metadata._fk_memos: - for fk in self.table.metadata._fk_memos[fk_key]: - fk._set_remote_table(table) + self._setup_on_memoized_fks(lambda fk: fk._set_remote_table(table)) + + def _setup_on_memoized_fks(self, fn): + fk_keys = [ + ((self.table.key, self.key), False), + ((self.table.key, self.name), True), + ] + for fk_key, link_to_name in fk_keys: + if fk_key in self.table.metadata._fk_memos: + for fk in self.table.metadata._fk_memos[fk_key]: + if fk.link_to_name is link_to_name: + fn(fk) def _on_table_attach(self, fn): if self.table is not None: fn(self, self.table) - event.listen(self, 'after_parent_attach', fn) + else: + event.listen(self, 'after_parent_attach', fn) def copy(self, **kw): """Create a copy of this ``Column``, unitialized. 
@@ -1200,7 +1324,7 @@ def copy(self, **kw): # Constraint objects plus non-constraint-bound ForeignKey objects args = \ - [c.copy(**kw) for c in self.constraints] + \ + [c.copy(**kw) for c in self.constraints if not c._type_bound] + \ [c.copy(**kw) for c in self.foreign_keys if not c.constraint] type_ = self.type @@ -1208,27 +1332,27 @@ def copy(self, **kw): type_ = type_.copy(**kw) c = self._constructor( - name=self.name, - type_=type_, - key=self.key, - primary_key=self.primary_key, - nullable=self.nullable, - unique=self.unique, - system=self.system, - #quote=self.quote, - index=self.index, - autoincrement=self.autoincrement, - default=self.default, - server_default=self.server_default, - onupdate=self.onupdate, - server_onupdate=self.server_onupdate, - doc=self.doc, - *args - ) + name=self.name, + type_=type_, + key=self.key, + primary_key=self.primary_key, + nullable=self.nullable, + unique=self.unique, + system=self.system, + # quote=self.quote, + index=self.index, + autoincrement=self.autoincrement, + default=self.default, + server_default=self.server_default, + onupdate=self.onupdate, + server_onupdate=self.server_onupdate, + doc=self.doc, + *args + ) return self._schema_item_copy(c) def _make_proxy(self, selectable, name=None, key=None, - name_is_truncatable=False, **kw): + name_is_truncatable=False, **kw): """Create a *proxy* for this column. This is a copy of this ``Column`` referenced by a different parent @@ -1238,15 +1362,16 @@ def _make_proxy(self, selectable, name=None, key=None, """ fk = [ForeignKey(f.column, _constraint=f.constraint) - for f in self.foreign_keys] + for f in self.foreign_keys] if name is None and self.name is None: - raise exc.InvalidRequestError("Cannot initialize a sub-selectable" - " with this Column object until it's 'name' has " - "been assigned.") + raise exc.InvalidRequestError( + "Cannot initialize a sub-selectable" + " with this Column object until its 'name' has " + "been assigned.") try: c = self._constructor( - _as_truncated(name or self.name) if \ - name_is_truncatable else (name or self.name), + _as_truncated(name or self.name) if + name_is_truncatable else (name or self.name), self.type, key=key if key else name if name else self.key, primary_key=self.primary_key, @@ -1260,7 +1385,7 @@ def _make_proxy(self, selectable, name=None, key=None, "attribute or method which accepts the " "standard Column constructor arguments, or " "references the Column class itself." % self.__class__) - ) + ) c.table = selectable selectable._columns.add(c) @@ -1320,9 +1445,10 @@ class ForeignKey(DialectKWArgs, SchemaItem): __visit_name__ = 'foreign_key' def __init__(self, column, _constraint=None, use_alter=False, name=None, - onupdate=None, ondelete=None, deferrable=None, - initially=None, link_to_name=False, match=None, - **dialect_kw): + onupdate=None, ondelete=None, deferrable=None, + initially=None, link_to_name=False, match=None, + info=None, + **dialect_kw): """ Construct a column-level FOREIGN KEY. @@ -1364,19 +1490,32 @@ def __init__(self, column, _constraint=None, use_alter=False, name=None, assigned ``key``. :param use_alter: passed to the underlying - :class:`.ForeignKeyConstraint` to indicate the constraint should be - generated/dropped externally from the CREATE TABLE/ DROP TABLE - statement. See that classes' constructor for details. + :class:`.ForeignKeyConstraint` to indicate the constraint should + be generated/dropped externally from the CREATE TABLE/ DROP TABLE + statement. 
See :paramref:`.ForeignKeyConstraint.use_alter` + for further description. + + .. seealso:: + + :paramref:`.ForeignKeyConstraint.use_alter` + + :ref:`use_alter` :param match: Optional string. If set, emit MATCH when issuing DDL for this constraint. Typical values include SIMPLE, PARTIAL and FULL. - :param \**dialect_kw: Additional keyword arguments are dialect specific, - and passed in the form ``_``. The arguments - are ultimately handled by a corresponding :class:`.ForeignKeyConstraint`. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + :param info: Optional data dictionary which will be populated into the + :attr:`.SchemaItem.info` attribute of this object. + + .. versionadded:: 1.0.0 + + :param \**dialect_kw: Additional keyword arguments are dialect + specific, and passed in the form ``_``. The + arguments are ultimately handled by a corresponding + :class:`.ForeignKeyConstraint`. See the documentation regarding + an individual dialect at :ref:`dialect_toplevel` for detail on + documented arguments. .. versionadded:: 0.9.2 @@ -1393,13 +1532,14 @@ def __init__(self, column, _constraint=None, use_alter=False, name=None, if not isinstance(self._table_column, ColumnClause): raise exc.ArgumentError( - "String, Column, or Column-bound argument " - "expected, got %r" % self._table_column) - elif not isinstance(self._table_column.table, (util.NoneType, TableClause)): + "String, Column, or Column-bound argument " + "expected, got %r" % self._table_column) + elif not isinstance( + self._table_column.table, (util.NoneType, TableClause)): raise exc.ArgumentError( - "ForeignKey received Column not bound " - "to a Table, got: %r" % self._table_column.table - ) + "ForeignKey received Column not bound " + "to a Table, got: %r" % self._table_column.table + ) # the linked ForeignKeyConstraint. # ForeignKey will create this when parent Column @@ -1416,6 +1556,8 @@ def __init__(self, column, _constraint=None, use_alter=False, name=None, self.initially = initially self.link_to_name = link_to_name self.match = match + if info: + self.info = info self._unvalidated_dialect_kw = dialect_kw def __repr__(self): @@ -1438,21 +1580,20 @@ def copy(self, schema=None): """ fk = ForeignKey( - self._get_colspec(schema=schema), - use_alter=self.use_alter, - name=self.name, - onupdate=self.onupdate, - ondelete=self.ondelete, - deferrable=self.deferrable, - initially=self.initially, - link_to_name=self.link_to_name, - match=self.match, - **self._unvalidated_dialect_kw - ) + self._get_colspec(schema=schema), + use_alter=self.use_alter, + name=self.name, + onupdate=self.onupdate, + ondelete=self.ondelete, + deferrable=self.deferrable, + initially=self.initially, + link_to_name=self.link_to_name, + match=self.match, + **self._unvalidated_dialect_kw + ) return self._schema_item_copy(fk) - - def _get_colspec(self, schema=None): + def _get_colspec(self, schema=None, table_name=None): """Return a string based 'column specification' for this :class:`.ForeignKey`. 
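As a quick illustration of the column-level :class:`.ForeignKey` options documented above, including the new ``info`` dictionary; the table names and the info contents are hypothetical::

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey

    metadata = MetaData()
    parent = Table('parent', metadata, Column('id', Integer, primary_key=True))

    child = Table('child', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('parent_id', Integer,
                         ForeignKey('parent.id',
                                    onupdate='CASCADE', ondelete='CASCADE',
                                    info={'description': 'link to parent'})))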
@@ -1462,10 +1603,18 @@ def _get_colspec(self, schema=None): """ if schema: _schema, tname, colname = self._column_tokens + if table_name is not None: + tname = table_name return "%s.%s.%s" % (schema, tname, colname) + elif table_name: + schema, tname, colname = self._column_tokens + if schema: + return "%s.%s.%s" % (schema, table_name, colname) + else: + return "%s.%s" % (table_name, colname) elif self._table_column is not None: return "%s.%s" % ( - self._table_column.table.fullname, self._table_column.key) + self._table_column.table.fullname, self._table_column.key) else: return self._colspec @@ -1473,7 +1622,6 @@ def _get_colspec(self, schema=None): def _referred_schema(self): return self._column_tokens[0] - def _table_key(self): if self._table_column is not None: if self._table_column.table is None: @@ -1484,8 +1632,6 @@ def _table_key(self): schema, tname, colname = self._column_tokens return _get_table_key(tname, schema) - - target_fullname = property(_get_colspec) def references(self, table): @@ -1539,13 +1685,13 @@ def _column_tokens(self): def _resolve_col_tokens(self): if self.parent is None: raise exc.InvalidRequestError( - "this ForeignKey object does not yet have a " - "parent Column associated with it.") + "this ForeignKey object does not yet have a " + "parent Column associated with it.") elif self.parent.table is None: raise exc.InvalidRequestError( - "this ForeignKey's parent column is not yet associated " - "with a Table.") + "this ForeignKey's parent column is not yet associated " + "with a Table.") parenttable = self.parent.table @@ -1569,7 +1715,6 @@ def _resolve_col_tokens(self): tablekey = _get_table_key(tname, schema) return parenttable, tablekey, colname - def _link_to_col_by_colstring(self, parenttable, table, colname): if not hasattr(self.constraint, '_referred_table'): self.constraint._referred_table = table @@ -1595,9 +1740,10 @@ def _link_to_col_by_colstring(self, parenttable, table, colname): if _column is None: raise exc.NoReferencedColumnError( - "Could not initialize target column for ForeignKey '%s' on table '%s': " - "table '%s' has no column named '%s'" % ( - self._colspec, parenttable.name, table.name, key), + "Could not initialize target column " + "for ForeignKey '%s' on table '%s': " + "table '%s' has no column named '%s'" % + (self._colspec, parenttable.name, table.name, key), table.name, key) self._set_target_column(_column) @@ -1610,11 +1756,11 @@ def _set_target_column(self, column): # super-edgy case, if other FKs point to our column, # they'd get the type propagated out also. 
if isinstance(self.parent.table, Table): - fk_key = (self.parent.table.key, self.parent.key) - if fk_key in self.parent.table.metadata._fk_memos: - for fk in self.parent.table.metadata._fk_memos[fk_key]: - if fk.parent.type._isnull: - fk.parent.type = column.type + + def set_type(fk): + if fk.parent.type._isnull: + fk.parent.type = column.type + self.parent._setup_on_memoized_fks(set_type) self.column = column @@ -1653,7 +1799,7 @@ def column(self): "Could not initialize target column for " "ForeignKey '%s' on table '%s': " "table '%s' has no column named '%s'" % ( - self._colspec, parenttable.name, tablekey, colname), + self._colspec, parenttable.name, tablekey, colname), tablekey, colname) elif hasattr(self._colspec, '__clause_element__'): _column = self._colspec.__clause_element__() @@ -1665,7 +1811,7 @@ def column(self): def _set_parent(self, column): if self.parent is not None and self.parent is not column: raise exc.InvalidRequestError( - "This ForeignKey already has a parent !") + "This ForeignKey already has a parent !") self.parent = column self.parent.foreign_keys.add(self) self.parent._on_table_attach(self._set_table) @@ -1693,8 +1839,8 @@ def _set_table(self, column, table): deferrable=self.deferrable, initially=self.initially, match=self.match, **self._unvalidated_dialect_kw - ) - self.constraint._elements[self.parent] = self + ) + self.constraint._append_element(column, self) self.constraint._set_parent_with_dispatch(table) table.foreign_keys.add(self) @@ -1706,7 +1852,8 @@ def _set_table(self, column, table): if table_key in parenttable.metadata.tables: table = parenttable.metadata.tables[table_key] try: - self._link_to_col_by_colstring(parenttable, table, colname) + self._link_to_col_by_colstring( + parenttable, table, colname) except exc.NoReferencedColumnError: # this is OK, we'll try later pass @@ -1719,12 +1866,11 @@ def _set_table(self, column, table): self._set_target_column(_column) - class _NotAColumnExpr(object): def _not_a_column_expr(self): raise exc.InvalidRequestError( - "This %s cannot be used directly " - "as a column expression." % self.__class__.__name__) + "This %s cannot be used directly " + "as a column expression." % self.__class__.__name__) __clause_element__ = self_group = lambda self: self._not_a_column_expr() _from_objects = property(lambda self: self._not_a_column_expr()) @@ -1830,13 +1976,13 @@ def is_clause_element(self): @util.memoized_property def is_scalar(self): return not self.is_callable and \ - not self.is_clause_element and \ - not self.is_sequence + not self.is_clause_element and \ + not self.is_sequence def _maybe_wrap_callable(self, fn): """Wrap callables that don't accept a context. - This is to allow easy compatiblity with default callables + This is to allow easy compatibility with default callables that aren't specific to accepting of a context. 
""" @@ -1879,8 +2025,10 @@ class Sequence(DefaultGenerator): The :class:`.Sequence` is typically associated with a primary key column:: - some_table = Table('some_table', metadata, - Column('id', Integer, Sequence('some_table_seq'), primary_key=True) + some_table = Table( + 'some_table', metadata, + Column('id', Integer, Sequence('some_table_seq'), + primary_key=True) ) When CREATE TABLE is emitted for the above :class:`.Table`, if the @@ -1900,8 +2048,9 @@ class Sequence(DefaultGenerator): is_sequence = True - def __init__(self, name, start=None, increment=None, schema=None, - optional=False, quote=None, metadata=None, + def __init__(self, name, start=None, increment=None, minvalue=None, + maxvalue=None, nominvalue=None, nomaxvalue=None, cycle=None, + schema=None, optional=False, quote=None, metadata=None, quote_schema=None, for_update=False): """Construct a :class:`.Sequence` object. @@ -1917,6 +2066,53 @@ def __init__(self, name, start=None, increment=None, schema=None, the database as the value of the "INCREMENT BY" clause. If ``None``, the clause is omitted, which on most platforms indicates an increment of 1. + :param minvalue: the minimum value of the sequence. This + value is used when the CREATE SEQUENCE command is emitted to + the database as the value of the "MINVALUE" clause. If ``None``, + the clause is omitted, which on most platforms indicates a + minvalue of 1 and -2^63-1 for ascending and descending sequences, + respectively. + + .. versionadded:: 1.0.7 + + :param maxvalue: the maximum value of the sequence. This + value is used when the CREATE SEQUENCE command is emitted to + the database as the value of the "MAXVALUE" clause. If ``None``, + the clause is omitted, which on most platforms indicates a + maxvalue of 2^63-1 and -1 for ascending and descending sequences, + respectively. + + .. versionadded:: 1.0.7 + + :param nominvalue: no minimum value of the sequence. This + value is used when the CREATE SEQUENCE command is emitted to + the database as the value of the "NO MINVALUE" clause. If ``None``, + the clause is omitted, which on most platforms indicates a + minvalue of 1 and -2^63-1 for ascending and descending sequences, + respectively. + + .. versionadded:: 1.0.7 + + :param nomaxvalue: no maximum value of the sequence. This + value is used when the CREATE SEQUENCE command is emitted to + the database as the value of the "NO MAXVALUE" clause. If ``None``, + the clause is omitted, which on most platforms indicates a + maxvalue of 2^63-1 and -1 for ascending and descending sequences, + respectively. + + .. versionadded:: 1.0.7 + + :param cycle: allows the sequence to wrap around when the maxvalue + or minvalue has been reached by an ascending or descending sequence + respectively. This value is used when the CREATE SEQUENCE command + is emitted to the database as the "CYCLE" clause. If the limit is + reached, the next number generated will be the minvalue or maxvalue, + respectively. If cycle=False (the default) any calls to nextval + after the sequence has reached its maximum value will return an + error. + + .. versionadded:: 1.0.7 + :param schema: Optional schema name for the sequence, if located in a schema other than the default. :param optional: boolean value, when ``True``, indicates that this @@ -1927,8 +2123,8 @@ def __init__(self, name, start=None, increment=None, schema=None, creates a sequence for us automatically". :param quote: boolean value, when ``True`` or ``False``, explicitly forces quoting of the schema name on or off. 
When left at its - default of ``None``, normal quoting rules based on casing and reserved - words take place. + default of ``None``, normal quoting rules based on casing and + reserved words take place. :param quote_schema: set the quoting preferences for the ``schema`` name. :param metadata: optional :class:`.MetaData` object which will be @@ -1961,6 +2157,11 @@ def __init__(self, name, start=None, increment=None, schema=None, self.name = quoted_name(name, quote) self.start = start self.increment = increment + self.minvalue = minvalue + self.maxvalue = maxvalue + self.nominvalue = nominvalue + self.nomaxvalue = nomaxvalue + self.cycle = cycle self.optional = optional if metadata is not None and schema is None and metadata.schema: self.schema = schema = metadata.schema @@ -2012,8 +2213,8 @@ def create(self, bind=None, checkfirst=True): if bind is None: bind = _bind_or_error(self) bind._run_visitor(ddl.SchemaGenerator, - self, - checkfirst=checkfirst) + self, + checkfirst=checkfirst) def drop(self, bind=None, checkfirst=True): """Drops this sequence from the database.""" @@ -2021,16 +2222,16 @@ def drop(self, bind=None, checkfirst=True): if bind is None: bind = _bind_or_error(self) bind._run_visitor(ddl.SchemaDropper, - self, - checkfirst=checkfirst) + self, + checkfirst=checkfirst) def _not_a_column_expr(self): raise exc.InvalidRequestError( - "This %s cannot be used directly " - "as a column expression. Use func.next_value(sequence) " - "to produce a 'next value' function that's usable " - "as a column element." - % self.__class__.__name__) + "This %s cannot be used directly " + "as a column expression. Use func.next_value(sequence) " + "to produce a 'next value' function that's usable " + "as a column element." + % self.__class__.__name__) @inspection._self_inspects @@ -2118,7 +2319,7 @@ def __init__(self, arg, for_update=False, _reflected=False): def __repr__(self): return "DefaultClause(%r, for_update=%r)" % \ - (self.arg, self.for_update) + (self.arg, self.for_update) class PassiveDefault(DefaultClause): @@ -2129,9 +2330,9 @@ class PassiveDefault(DefaultClause): Use :class:`.DefaultClause`. """ @util.deprecated("0.6", - ":class:`.PassiveDefault` is deprecated. " - "Use :class:`.DefaultClause`.", - False) + ":class:`.PassiveDefault` is deprecated. " + "Use :class:`.DefaultClause`.", + False) def __init__(self, *arg, **kw): DefaultClause.__init__(self, *arg, **kw) @@ -2142,8 +2343,8 @@ class Constraint(DialectKWArgs, SchemaItem): __visit_name__ = 'constraint' def __init__(self, name=None, deferrable=None, initially=None, - _create_rule=None, - **dialect_kw): + _create_rule=None, info=None, _type_bound=False, + **dialect_kw): """Create a SQL constraint. :param name: @@ -2157,6 +2358,11 @@ def __init__(self, name=None, deferrable=None, initially=None, Optional string. If set, emit INITIALLY when issuing DDL for this constraint. + :param info: Optional data dictionary which will be populated into the + :attr:`.SchemaItem.info` attribute of this object. + + .. versionadded:: 1.0.0 + :param _create_rule: a callable which is passed the DDLCompiler object during compilation. Returns True or False to signal inline generation of @@ -2174,17 +2380,20 @@ def __init__(self, name=None, deferrable=None, initially=None, _create_rule is used by some types to create constraints. Currently, its call signature is subject to change at any time. - :param \**dialect_kw: Additional keyword arguments are dialect specific, - and passed in the form ``_``. 
See the - documentation regarding an individual dialect at :ref:`dialect_toplevel` - for detail on documented arguments. + :param \**dialect_kw: Additional keyword arguments are dialect + specific, and passed in the form ``_``. See + the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ self.name = name self.deferrable = deferrable self.initially = initially + if info: + self.info = info self._create_rule = _create_rule + self._type_bound = _type_bound util.set_creation_order(self) self._validate_dialect_kwargs(dialect_kw) @@ -2196,8 +2405,8 @@ def table(self): except AttributeError: pass raise exc.InvalidRequestError( - "This constraint is not bound to a table. Did you " - "mean to call table.append_constraint(constraint) ?") + "This constraint is not bound to a table. Did you " + "mean to call table.append_constraint(constraint) ?") def _set_parent(self, parent): self.parent = parent @@ -2225,14 +2434,85 @@ def _to_schema_column_or_string(element): class ColumnCollectionMixin(object): - def __init__(self, *columns): + + columns = None + """A :class:`.ColumnCollection` of :class:`.Column` objects. + + This collection represents the columns which are referred to by + this object. + + """ + + _allow_multiple_tables = False + + def __init__(self, *columns, **kw): + _autoattach = kw.pop('_autoattach', True) self.columns = ColumnCollection() self._pending_colargs = [_to_schema_column_or_string(c) - for c in columns] - if self._pending_colargs and \ - isinstance(self._pending_colargs[0], Column) and \ - isinstance(self._pending_colargs[0].table, Table): - self._set_parent_with_dispatch(self._pending_colargs[0].table) + for c in columns] + if _autoattach and self._pending_colargs: + self._check_attach() + + @classmethod + def _extract_col_expression_collection(cls, expressions): + for expr in expressions: + strname = None + column = None + if not isinstance(expr, ClauseElement): + # this assumes a string + strname = expr + else: + cols = [] + visitors.traverse(expr, {}, {'column': cols.append}) + if cols: + column = cols[0] + add_element = column if column is not None else strname + yield expr, column, strname, add_element + + def _check_attach(self, evt=False): + col_objs = [ + c for c in self._pending_colargs + if isinstance(c, Column) + ] + + cols_w_table = [ + c for c in col_objs if isinstance(c.table, Table) + ] + + cols_wo_table = set(col_objs).difference(cols_w_table) + + if cols_wo_table: + # feature #3341 - place event listeners for Column objects + # such that when all those cols are attached, we autoattach. + assert not evt, "Should not reach here on event call" + + # issue #3411 - don't do the per-column auto-attach if some of the + # columns are specified as strings. + has_string_cols = set(self._pending_colargs).difference(col_objs) + if not has_string_cols: + def _col_attached(column, table): + cols_wo_table.discard(column) + if not cols_wo_table: + self._check_attach(evt=True) + self._cols_wo_table = cols_wo_table + for col in cols_wo_table: + col._on_table_attach(_col_attached) + return + + columns = cols_w_table + + tables = set([c.table for c in columns]) + if len(tables) == 1: + self._set_parent_with_dispatch(tables.pop()) + elif len(tables) > 1 and not self._allow_multiple_tables: + table = columns[0].table + others = [c for c in columns[1:] if c.table is not table] + if others: + raise exc.ArgumentError( + "Column(s) %s are not part of table '%s'." 
% + (", ".join("'%s'" % c for c in others), + table.description) + ) def _set_parent(self, table): for col in self._pending_colargs: @@ -2264,8 +2544,9 @@ def __init__(self, *columns, **kw): arguments are propagated to the :class:`.Constraint` superclass. """ + _autoattach = kw.pop('_autoattach', True) Constraint.__init__(self, **kw) - ColumnCollectionMixin.__init__(self, *columns) + ColumnCollectionMixin.__init__(self, *columns, _autoattach=_autoattach) def _set_parent(self, table): Constraint._set_parent(self, table) @@ -2276,10 +2557,17 @@ def __contains__(self, x): def copy(self, **kw): c = self.__class__(name=self.name, deferrable=self.deferrable, - initially=self.initially, *self.columns.keys()) + initially=self.initially, *self.columns.keys()) return self._schema_item_copy(c) def contains_column(self, col): + """Return True if this constraint contains the given column. + + Note that this object also contains an attribute ``.columns`` + which is a :class:`.ColumnCollection` of :class:`.Column` objects. + + """ + return self.columns.contains_column(col) def __iter__(self): @@ -2293,15 +2581,17 @@ def __len__(self): return len(self.columns._data) -class CheckConstraint(Constraint): +class CheckConstraint(ColumnCollectionConstraint): """A table- or column-level CHECK constraint. Can be included in the definition of a Table or Column. """ + _allow_multiple_tables = True + def __init__(self, sqltext, name=None, deferrable=None, - initially=None, table=None, _create_rule=None, - _autoattach=True): + initially=None, table=None, info=None, _create_rule=None, + _autoattach=True, _type_bound=False): """Construct a CHECK constraint. :param sqltext: @@ -2323,20 +2613,26 @@ def __init__(self, sqltext, name=None, deferrable=None, Optional string. If set, emit INITIALLY when issuing DDL for this constraint. + :param info: Optional data dictionary which will be populated into the + :attr:`.SchemaItem.info` attribute of this object. + + .. versionadded:: 1.0.0 + """ + self.sqltext = _literal_as_text(sqltext, warn=False) + + columns = [] + visitors.traverse(self.sqltext, {}, {'column': columns.append}) + super(CheckConstraint, self).\ - __init__(name, deferrable, initially, _create_rule) - self.sqltext = _literal_as_text(sqltext) + __init__( + name=name, deferrable=deferrable, + initially=initially, _create_rule=_create_rule, info=info, + _type_bound=_type_bound, _autoattach=_autoattach, + *columns) if table is not None: self._set_parent_with_dispatch(table) - elif _autoattach: - cols = _find_columns(self.sqltext) - tables = set([c.table for c in cols - if isinstance(c.table, Table)]) - if len(tables) == 1: - self._set_parent_with_dispatch( - tables.pop()) def __visit_name__(self): if isinstance(self.parent, Table): @@ -2356,22 +2652,24 @@ def replace(col): else: sqltext = self.sqltext c = CheckConstraint(sqltext, - name=self.name, - initially=self.initially, - deferrable=self.deferrable, - _create_rule=self._create_rule, - table=target_table, - _autoattach=False) + name=self.name, + initially=self.initially, + deferrable=self.deferrable, + _create_rule=self._create_rule, + table=target_table, + _autoattach=False, + _type_bound=self._type_bound) return self._schema_item_copy(c) -class ForeignKeyConstraint(Constraint): +class ForeignKeyConstraint(ColumnCollectionConstraint): """A table-level FOREIGN KEY constraint. Defines a single column or composite FOREIGN KEY ... REFERENCES constraint. 
For a no-frills, single column foreign key, adding a - :class:`.ForeignKey` to the definition of a :class:`.Column` is a shorthand - equivalent for an unnamed, single column :class:`.ForeignKeyConstraint`. + :class:`.ForeignKey` to the definition of a :class:`.Column` is a + shorthand equivalent for an unnamed, single column + :class:`.ForeignKeyConstraint`. Examples of foreign key configuration are in :ref:`metadata_foreignkeys`. @@ -2379,8 +2677,9 @@ class ForeignKeyConstraint(Constraint): __visit_name__ = 'foreign_key_constraint' def __init__(self, columns, refcolumns, name=None, onupdate=None, - ondelete=None, deferrable=None, initially=None, use_alter=False, - link_to_name=False, match=None, table=None, **dialect_kw): + ondelete=None, deferrable=None, initially=None, + use_alter=False, link_to_name=False, match=None, + table=None, info=None, **dialect_kw): """Construct a composite-capable FOREIGN KEY. :param columns: A sequence of local column names. The named columns @@ -2415,142 +2714,188 @@ def __init__(self, columns, refcolumns, name=None, onupdate=None, part of the CREATE TABLE definition. Instead, generate it via an ALTER TABLE statement issued after the full collection of tables have been created, and drop it via an ALTER TABLE statement before - the full collection of tables are dropped. This is shorthand for the - usage of :class:`.AddConstraint` and :class:`.DropConstraint` applied - as "after-create" and "before-drop" events on the MetaData object. - This is normally used to generate/drop constraints on objects that - are mutually dependent on each other. + the full collection of tables are dropped. + + The use of :paramref:`.ForeignKeyConstraint.use_alter` is + particularly geared towards the case where two or more tables + are established within a mutually-dependent foreign key constraint + relationship; however, the :meth:`.MetaData.create_all` and + :meth:`.MetaData.drop_all` methods will perform this resolution + automatically, so the flag is normally not needed. + + .. versionchanged:: 1.0.0 Automatic resolution of foreign key + cycles has been added, removing the need to use the + :paramref:`.ForeignKeyConstraint.use_alter` in typical use + cases. + + .. seealso:: + + :ref:`use_alter` :param match: Optional string. If set, emit MATCH when issuing - DDL for this constraint. Typical values include SIMPLE, PARTIAL - and FULL. + DDL for this constraint. Typical values include SIMPLE, PARTIAL + and FULL. + + :param info: Optional data dictionary which will be populated into the + :attr:`.SchemaItem.info` attribute of this object. - :param \**dialect_kw: Additional keyword arguments are dialect specific, - and passed in the form ``_``. See the - documentation regarding an individual dialect at :ref:`dialect_toplevel` - for detail on documented arguments. + .. versionadded:: 1.0.0 + + :param \**dialect_kw: Additional keyword arguments are dialect + specific, and passed in the form ``_``. See + the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. .. 
versionadded:: 0.9.2 """ - super(ForeignKeyConstraint, self).\ - __init__(name, deferrable, initially, **dialect_kw) + Constraint.__init__( + self, name=name, deferrable=deferrable, initially=initially, + info=info, **dialect_kw) self.onupdate = onupdate self.ondelete = ondelete self.link_to_name = link_to_name - if self.name is None and use_alter: - raise exc.ArgumentError("Alterable Constraint requires a name") self.use_alter = use_alter self.match = match - self._elements = util.OrderedDict() - # standalone ForeignKeyConstraint - create # associated ForeignKey objects which will be applied to hosted # Column objects (in col.foreign_keys), either now or when attached # to the Table for string-specified names - for col, refcol in zip(columns, refcolumns): - self._elements[col] = ForeignKey( - refcol, - _constraint=self, - name=self.name, - onupdate=self.onupdate, - ondelete=self.ondelete, - use_alter=self.use_alter, - link_to_name=self.link_to_name, - match=self.match, - deferrable=self.deferrable, - initially=self.initially, - **self.dialect_kwargs - ) + self.elements = [ + ForeignKey( + refcol, + _constraint=self, + name=self.name, + onupdate=self.onupdate, + ondelete=self.ondelete, + use_alter=self.use_alter, + link_to_name=self.link_to_name, + match=self.match, + deferrable=self.deferrable, + initially=self.initially, + **self.dialect_kwargs + ) for refcol in refcolumns + ] + ColumnCollectionMixin.__init__(self, *columns) if table is not None: + if hasattr(self, "parent"): + assert table is self.parent self._set_parent_with_dispatch(table) - elif columns and \ - isinstance(columns[0], Column) and \ - columns[0].table is not None: - self._set_parent_with_dispatch(columns[0].table) + + def _append_element(self, column, fk): + self.columns.add(column) + self.elements.append(fk) + + @property + def _elements(self): + # legacy - provide a dictionary view of (column_key, fk) + return util.OrderedDict( + zip(self.column_keys, self.elements) + ) @property def _referred_schema(self): - for elem in self._elements.values(): + for elem in self.elements: return elem._referred_schema else: return None + @property + def referred_table(self): + """The :class:`.Table` object to which this + :class:`.ForeignKeyConstraint` references. + + This is a dynamically calculated attribute which may not be available + if the constraint and/or parent table is not yet associated with + a metadata collection that contains the referred table. + + .. versionadded:: 1.0.0 + + """ + return self.elements[0].column.table + def _validate_dest_table(self, table): - table_keys = set([elem._table_key() for elem in self._elements.values()]) + table_keys = set([elem._table_key() + for elem in self.elements]) if None not in table_keys and len(table_keys) > 1: elem0, elem1 = sorted(table_keys)[0:2] raise exc.ArgumentError( 'ForeignKeyConstraint on %s(%s) refers to ' 'multiple remote tables: %s and %s' % ( - table.fullname, - self._col_description, - elem0, - elem1 - )) + table.fullname, + self._col_description, + elem0, + elem1 + )) @property - def _col_description(self): - return ", ".join(self._elements) + def column_keys(self): + """Return a list of string keys representing the local + columns in this :class:`.ForeignKeyConstraint`. - @property - def columns(self): - return list(self._elements) + This list is either the original string arguments sent + to the constructor of the :class:`.ForeignKeyConstraint`, + or if the constraint has been initialized with :class:`.Column` + objects, is the string .key of each element. 
+ + .. versionadded:: 1.0.0 + + """ + if hasattr(self, "parent"): + return self.columns.keys() + else: + return [ + col.key if isinstance(col, ColumnElement) + else str(col) for col in self._pending_colargs + ] @property - def elements(self): - return list(self._elements.values()) + def _col_description(self): + return ", ".join(self.column_keys) def _set_parent(self, table): - super(ForeignKeyConstraint, self)._set_parent(table) - - self._validate_dest_table(table) + Constraint._set_parent(self, table) - for col, fk in self._elements.items(): - # string-specified column names now get - # resolved to Column objects - if isinstance(col, util.string_types): - try: - col = table.c[col] - except KeyError: - raise exc.ArgumentError( - "Can't create ForeignKeyConstraint " - "on table '%s': no column " - "named '%s' is present." % (table.description, col)) + try: + ColumnCollectionConstraint._set_parent(self, table) + except KeyError as ke: + raise exc.ArgumentError( + "Can't create ForeignKeyConstraint " + "on table '%s': no column " + "named '%s' is present." % (table.description, ke.args[0])) + for col, fk in zip(self.columns, self.elements): if not hasattr(fk, 'parent') or \ fk.parent is not col: fk._set_parent_with_dispatch(col) - if self.use_alter: - def supports_alter(ddl, event, schema_item, bind, **kw): - return table in set(kw['tables']) and \ - bind.dialect.supports_alter - - event.listen(table.metadata, "after_create", - ddl.AddConstraint(self, on=supports_alter)) - event.listen(table.metadata, "before_drop", - ddl.DropConstraint(self, on=supports_alter)) + self._validate_dest_table(table) - def copy(self, schema=None, **kw): + def copy(self, schema=None, target_table=None, **kw): fkc = ForeignKeyConstraint( - [x.parent.key for x in self._elements.values()], - [x._get_colspec(schema=schema) for x in self._elements.values()], - name=self.name, - onupdate=self.onupdate, - ondelete=self.ondelete, - use_alter=self.use_alter, - deferrable=self.deferrable, - initially=self.initially, - link_to_name=self.link_to_name, - match=self.match - ) + [x.parent.key for x in self.elements], + [x._get_colspec( + schema=schema, + table_name=target_table.name + if target_table is not None + and x._table_key() == x.parent.table.key + else None) + for x in self.elements], + name=self.name, + onupdate=self.onupdate, + ondelete=self.ondelete, + use_alter=self.use_alter, + deferrable=self.deferrable, + initially=self.initially, + link_to_name=self.link_to_name, + match=self.match + ) for self_fk, other_fk in zip( - self._elements.values(), - fkc._elements.values()): + self.elements, + fkc.elements): self_fk._schema_item_copy(other_fk) return self._schema_item_copy(fkc) @@ -2570,8 +2915,10 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint): ... ) >>> my_table.primary_key PrimaryKeyConstraint( - Column('id', Integer(), table=, primary_key=True, nullable=False), - Column('version_id', Integer(), table=, primary_key=True, nullable=False) + Column('id', Integer(), table=, + primary_key=True, nullable=False), + Column('version_id', Integer(), table=, + primary_key=True, nullable=False) ) The primary key of a :class:`.Table` can also be specified by using @@ -2583,7 +2930,8 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint): Column('id', Integer), Column('version_id', Integer), Column('data', String(50)), - PrimaryKeyConstraint('id', 'version_id', name='mytable_pk') + PrimaryKeyConstraint('id', 'version_id', + name='mytable_pk') ) The two styles of column-specification should generally not be mixed. 
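The reworked ``ForeignKeyConstraint`` exposes its pieces through ``.elements`` (now a plain list of ``ForeignKey`` objects), the new ``.column_keys`` list and the new ``.referred_table`` accessor. A small sketch, with made-up table names::

    from sqlalchemy import (Column, ForeignKeyConstraint, Integer,
                            MetaData, Table)

    metadata = MetaData()
    parent = Table('parent', metadata,
                   Column('id', Integer, primary_key=True))
    child = Table(
        'child', metadata,
        Column('id', Integer, primary_key=True),
        Column('parent_id', Integer),
        ForeignKeyConstraint(['parent_id'], ['parent.id'],
                             name='fk_child_parent'),
    )

    fkc = next(c for c in child.constraints
               if isinstance(c, ForeignKeyConstraint))

    # .elements is a list of ForeignKey objects; .column_keys gives the
    # local column names; .referred_table resolves the remote Table
    assert [fk.parent.key for fk in fkc.elements] == ['parent_id']
    assert fkc.column_keys == ['parent_id']
    assert fkc.referred_table is parent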
@@ -2591,9 +2939,9 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint): :class:`.PrimaryKeyConstraint` don't match the columns that were marked as ``primary_key=True``, if both are present; in this case, the columns are taken strictly from the - :class:`.PrimaryKeyConstraint` declaration, and those columns otherwise marked - as ``primary_key=True`` are ignored. This behavior is intended to be - backwards compatible with previous behavior. + :class:`.PrimaryKeyConstraint` declaration, and those columns otherwise + marked as ``primary_key=True`` are ignored. This behavior is intended to + be backwards compatible with previous behavior. .. versionchanged:: 0.9.2 Using a mixture of columns within a :class:`.PrimaryKeyConstraint` in addition to columns marked as @@ -2603,23 +2951,26 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint): may raise an exception in a future release. For the use case where specific options are to be specified on the - :class:`.PrimaryKeyConstraint`, but the usual style of using ``primary_key=True`` - flags is still desirable, an empty :class:`.PrimaryKeyConstraint` may be - specified, which will take on the primary key column collection from - the :class:`.Table` based on the flags:: + :class:`.PrimaryKeyConstraint`, but the usual style of using + ``primary_key=True`` flags is still desirable, an empty + :class:`.PrimaryKeyConstraint` may be specified, which will take on the + primary key column collection from the :class:`.Table` based on the + flags:: my_table = Table('mytable', metadata, Column('id', Integer, primary_key=True), Column('version_id', Integer, primary_key=True), Column('data', String(50)), - PrimaryKeyConstraint(name='mytable_pk', mssql_clustered=True) + PrimaryKeyConstraint(name='mytable_pk', + mssql_clustered=True) ) .. versionadded:: 0.9.2 an empty :class:`.PrimaryKeyConstraint` may now - be specified for the purposes of establishing keyword arguments with the - constraint, independently of the specification of "primary key" columns - within the :class:`.Table` itself; columns marked as ``primary_key=True`` - will be gathered into the empty constraint's column collection. + be specified for the purposes of establishing keyword arguments with + the constraint, independently of the specification of "primary key" + columns within the :class:`.Table` itself; columns marked as + ``primary_key=True`` will be gathered into the empty constraint's + column collection. """ @@ -2635,23 +2986,24 @@ def _set_parent(self, table): table_pks = [c for c in table.c if c.primary_key] if self.columns and table_pks and \ - set(table_pks) != set(self.columns.values()): + set(table_pks) != set(self.columns.values()): util.warn( - "Table '%s' specifies columns %s as primary_key=True, " - "not matching locally specified columns %s; setting the " - "current primary key columns to %s. This warning " - "may become an exception in a future release" % - ( - table.name, - ", ".join("'%s'" % c.name for c in table_pks), - ", ".join("'%s'" % c.name for c in self.columns), - ", ".join("'%s'" % c.name for c in self.columns) - ) + "Table '%s' specifies columns %s as primary_key=True, " + "not matching locally specified columns %s; setting the " + "current primary key columns to %s. 
This warning " + "may become an exception in a future release" % + ( + table.name, + ", ".join("'%s'" % c.name for c in table_pks), + ", ".join("'%s'" % c.name for c in self.columns), + ", ".join("'%s'" % c.name for c in self.columns) ) + ) table_pks[:] = [] for c in self.columns: c.primary_key = True + c.nullable = False self.columns.extend(table_pks) def _reload(self, columns): @@ -2724,19 +3076,47 @@ class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem): Index("some_index", sometable.c.name, sometable.c.address) - Functional indexes are supported as well, keeping in mind that at least - one :class:`.Column` must be present:: + Functional indexes are supported as well, typically by using the + :data:`.func` construct in conjunction with table-bound + :class:`.Column` objects:: Index("some_index", func.lower(sometable.c.name)) .. versionadded:: 0.8 support for functional and expression-based indexes. + An :class:`.Index` can also be manually associated with a :class:`.Table`, + either through inline declaration or using + :meth:`.Table.append_constraint`. When this approach is used, the names + of the indexed columns can be specified as strings:: + + Table("sometable", metadata, + Column("name", String(50)), + Column("address", String(100)), + Index("some_index", "name", "address") + ) + + To support functional or expression-based indexes in this form, the + :func:`.text` construct may be used:: + + from sqlalchemy import text + + Table("sometable", metadata, + Column("name", String(50)), + Column("address", String(100)), + Index("some_index", text("lower(name)")) + ) + + .. versionadded:: 0.9.5 the :func:`.text` construct may be used to + specify :class:`.Index` expressions, provided the :class:`.Index` + is explicitly associated with the :class:`.Table`. + + .. seealso:: :ref:`schema_indexes` - General information on :class:`.Index`. - :ref:`postgresql_indexes` - PostgreSQL-specific options available for the - :class:`.Index` construct. + :ref:`postgresql_indexes` - PostgreSQL-specific options available for + the :class:`.Index` construct. :ref:`mysql_indexes` - MySQL-specific options available for the :class:`.Index` construct. @@ -2757,7 +3137,7 @@ def __init__(self, name, *expressions, **kw): :param \*expressions: Column expressions to include in the index. The expressions are normally instances of :class:`.Column`, but may also - be arbitrary SQL expressions which ultmately refer to a + be arbitrary SQL expressions which ultimately refer to a :class:`.Column`. :param unique=False: @@ -2768,37 +3148,37 @@ def __init__(self, name, *expressions, **kw): the index. Works in the same manner as that of :paramref:`.Column.quote`. + :param info=None: Optional data dictionary which will be populated + into the :attr:`.SchemaItem.info` attribute of this object. + + .. versionadded:: 1.0.0 + :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. See the documentation regarding an + individual dialect at :ref:`dialect_toplevel` for detail on + documented arguments. 
""" self.table = None columns = [] - for expr in expressions: - if not isinstance(expr, ClauseElement): - columns.append(expr) - else: - cols = [] - visitors.traverse(expr, {}, {'column': cols.append}) - if cols: - columns.append(cols[0]) - else: - columns.append(expr) + for expr, column, strname, add_element in self.\ + _extract_col_expression_collection(expressions): + if add_element is not None: + columns.append(add_element) self.expressions = expressions self.name = quoted_name(name, kw.pop("quote", None)) self.unique = kw.pop('unique', False) + if 'info' in kw: + self.info = kw.pop('info') self._validate_dialect_kwargs(kw) # will call _set_parent() if table-bound column # objects are present ColumnCollectionMixin.__init__(self, *columns) - - def _set_parent(self, table): ColumnCollectionMixin._set_parent(self, table) @@ -2812,18 +3192,13 @@ def _set_parent(self, table): ) ) self.table = table - for c in self.columns: - if c.table != self.table: - raise exc.ArgumentError( - "Column '%s' is not part of table '%s'." % - (c, self.table.description) - ) table.indexes.add(self) self.expressions = [ expr if isinstance(expr, ClauseElement) else colexpr - for expr, colexpr in zip(self.expressions, self.columns) + for expr, colexpr in util.zip_longest(self.expressions, + self.columns) ] @property @@ -2863,11 +3238,11 @@ def drop(self, bind=None): def __repr__(self): return 'Index(%s)' % ( - ", ".join( - [repr(self.name)] + - [repr(c) for c in self.columns] + - (self.unique and ["unique=True"] or []) - )) + ", ".join( + [repr(self.name)] + + [repr(e) for e in self.expressions] + + (self.unique and ["unique=True"] or []) + )) DEFAULT_NAMING_CONVENTION = util.immutabledict({ @@ -2888,9 +3263,9 @@ class MetaData(SchemaItem): The :class:`.Table` objects themselves are stored in the :attr:`.MetaData.tables` dictionary. - :class:`.MetaData` is a thread-safe object for read operations. Construction - of new tables within a single :class:`.MetaData` object, either explicitly - or via reflection, may not be completely thread-safe. + :class:`.MetaData` is a thread-safe object for read operations. + Construction of new tables within a single :class:`.MetaData` object, + either explicitly or via reflection, may not be completely thread-safe. .. seealso:: @@ -2902,8 +3277,9 @@ class MetaData(SchemaItem): def __init__(self, bind=None, reflect=False, schema=None, quote_schema=None, - naming_convention=DEFAULT_NAMING_CONVENTION - ): + naming_convention=DEFAULT_NAMING_CONVENTION, + info=None + ): """Create a new MetaData object. :param bind: @@ -2928,6 +3304,11 @@ def __init__(self, bind=None, reflect=False, schema=None, :class:`.Sequence`, and other objects which make usage of the local ``schema`` name. + :param info: Optional data dictionary which will be populated into the + :attr:`.SchemaItem.info` attribute of this object. + + .. versionadded:: 1.0.0 + :param naming_convention: a dictionary referring to values which will establish default naming conventions for :class:`.Constraint` and :class:`.Index` objects, for those objects which are not given @@ -2948,9 +3329,9 @@ def __init__(self, bind=None, reflect=False, schema=None, The values associated with each "constraint class" or "constraint mnemonic" key are string naming templates, such as ``"uq_%(table_name)s_%(column_0_name)s"``, - which decribe how the name should be composed. 
The values associated - with user-defined "token" keys should be callables of the form - ``fn(constraint, table)``, which accepts the constraint/index + which describe how the name should be composed. The values + associated with user-defined "token" keys should be callables of the + form ``fn(constraint, table)``, which accepts the constraint/index object and :class:`.Table` as arguments, returning a string result. @@ -2974,14 +3355,15 @@ def __init__(self, bind=None, reflect=False, schema=None, index position "0", e.g. :attr:`.Column.key` * ``%(referred_column_0_name)s`` - the name of a :class:`.Column` - at index position "0" referenced by a :class:`.ForeignKeyConstraint`. + at index position "0" referenced by a + :class:`.ForeignKeyConstraint`. - * ``%(constraint_name)s`` - a special key that refers to the existing - name given to the constraint. When this key is present, the - :class:`.Constraint` object's existing name will be replaced with - one that is composed from template string that uses this token. - When this token is present, it is required that the :class:`.Constraint` - is given an expicit name ahead of time. + * ``%(constraint_name)s`` - a special key that refers to the + existing name given to the constraint. When this key is + present, the :class:`.Constraint` object's existing name will be + replaced with one that is composed from template string that + uses this token. When this token is present, it is required that + the :class:`.Constraint` is given an expicit name ahead of time. * user-defined: any additional token may be implemented by passing it along with a ``fn(constraint, table)`` callable to the @@ -2998,6 +3380,8 @@ def __init__(self, bind=None, reflect=False, schema=None, self.tables = util.immutabledict() self.schema = quoted_name(schema, quote_schema) self.naming_convention = naming_convention + if info: + self.info = info self._schemas = set() self._sequences = {} self._fk_memos = collections.defaultdict(list) @@ -3005,7 +3389,7 @@ def __init__(self, bind=None, reflect=False, schema=None, self.bind = bind if reflect: util.warn_deprecated("reflect=True is deprecate; please " - "use the reflect() method.") + "use the reflect() method.") if not bind: raise exc.ArgumentError( "A bind must be supplied in conjunction " @@ -3040,8 +3424,6 @@ def _add_table(self, name, schema, table): if schema: self._schemas.add(schema) - - def _remove_table(self, name, schema): key = _get_table_key(name, schema) removed = dict.pop(self.tables, key, None) @@ -3050,20 +3432,22 @@ def _remove_table(self, name, schema): fk._remove_from_metadata(self) if self._schemas: self._schemas = set([t.schema - for t in self.tables.values() - if t.schema is not None]) - + for t in self.tables.values() + if t.schema is not None]) def __getstate__(self): return {'tables': self.tables, 'schema': self.schema, 'schemas': self._schemas, 'sequences': self._sequences, - 'fk_memos': self._fk_memos} + 'fk_memos': self._fk_memos, + 'naming_convention': self.naming_convention + } def __setstate__(self, state): self.tables = state['tables'] self.schema = state['schema'] + self.naming_convention = state['naming_convention'] self._bind = None self._sequences = state['sequences'] self._schemas = state['schemas'] @@ -3125,19 +3509,37 @@ def sorted_tables(self): order in which they can be created. To get the order in which the tables would be dropped, use the ``reversed()`` Python built-in. + .. 
warning:: + + The :attr:`.sorted_tables` accessor cannot by itself accommodate + automatic resolution of dependency cycles between tables, which + are usually caused by mutually dependent foreign key constraints. + To resolve these cycles, either the + :paramref:`.ForeignKeyConstraint.use_alter` parameter may be appled + to those constraints, or use the + :func:`.schema.sort_tables_and_constraints` function which will break + out foreign key constraints involved in cycles separately. + .. seealso:: + :func:`.schema.sort_tables` + + :func:`.schema.sort_tables_and_constraints` + :attr:`.MetaData.tables` :meth:`.Inspector.get_table_names` + :meth:`.Inspector.get_sorted_table_and_fkc_names` + + """ - return ddl.sort_tables(self.tables.values()) + return ddl.sort_tables(sorted(self.tables.values(), key=lambda t: t.key)) def reflect(self, bind=None, schema=None, views=False, only=None, - extend_existing=False, - autoload_replace=True, - **dialect_kwargs): + extend_existing=False, + autoload_replace=True, + **dialect_kwargs): """Load all available table definitions from the database. Automatically creates ``Table`` entries in this ``MetaData`` for any @@ -3182,13 +3584,15 @@ def reflect(self, bind=None, schema=None, views=False, only=None, .. versionadded:: 0.9.1 - :param \**dialect_kwargs: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + :param \**dialect_kwargs: Additional keyword arguments not mentioned + above are dialect specific, and passed in the form + ``_``. See the documentation regarding an + individual dialect at :ref:`dialect_toplevel` for detail on + documented arguments. - .. versionadded:: 0.9.2 - Added :paramref:`.MetaData.reflect.**dialect_kwargs` - to support dialect-level reflection options for all :class:`.Table` + .. versionadded:: 0.9.2 - Added + :paramref:`.MetaData.reflect.**dialect_kwargs` to support + dialect-level reflection options for all :class:`.Table` objects reflected. 
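For reference, a tiny example of the dependency ordering that ``sorted_tables`` provides (and which the warning above qualifies for cyclic foreign keys); the table names are invented::

    from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

    metadata = MetaData()
    parent = Table('parent', metadata,
                   Column('id', Integer, primary_key=True))
    child = Table('child', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('parent_id', Integer, ForeignKey('parent.id')))

    # tables come back in creation order; reversed() yields drop order
    assert metadata.sorted_tables == [parent, child]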
""" @@ -3212,8 +3616,8 @@ def reflect(self, bind=None, schema=None, views=False, only=None, if schema is not None: reflect_opts['schema'] = schema - available = util.OrderedSet(bind.engine.table_names(schema, - connection=conn)) + available = util.OrderedSet( + bind.engine.table_names(schema, connection=conn)) if views: available.update( bind.dialect.get_view_names(conn, schema) @@ -3221,7 +3625,7 @@ def reflect(self, bind=None, schema=None, views=False, only=None, if schema is not None: available_w_schema = util.OrderedSet(["%s.%s" % (schema, name) - for name in available]) + for name in available]) else: available_w_schema = available @@ -3233,9 +3637,9 @@ def reflect(self, bind=None, schema=None, views=False, only=None, if extend_existing or schname not in current] elif util.callable(only): load = [name for name, schname in - zip(available, available_w_schema) - if (extend_existing or schname not in current) - and only(name, self)] + zip(available, available_w_schema) + if (extend_existing or schname not in current) + and only(name, self)] else: missing = [name for name in only if name not in available] if missing: @@ -3245,7 +3649,7 @@ def reflect(self, bind=None, schema=None, views=False, only=None, 'in %s%s: (%s)' % (bind.engine.url, s, ', '.join(missing))) load = [name for name in only if extend_existing or - name not in current] + name not in current] for name in load: Table(name, self, **reflect_opts) @@ -3286,9 +3690,9 @@ def create_all(self, bind=None, tables=None, checkfirst=True): if bind is None: bind = _bind_or_error(self) bind._run_visitor(ddl.SchemaGenerator, - self, - checkfirst=checkfirst, - tables=tables) + self, + checkfirst=checkfirst, + tables=tables) def drop_all(self, bind=None, tables=None, checkfirst=True): """Drop all tables stored in this metadata. @@ -3313,9 +3717,9 @@ def drop_all(self, bind=None, tables=None, checkfirst=True): if bind is None: bind = _bind_or_error(self) bind._run_visitor(ddl.SchemaDropper, - self, - checkfirst=checkfirst, - tables=tables) + self, + checkfirst=checkfirst, + tables=tables) class ThreadLocalMetaData(MetaData): @@ -3362,7 +3766,7 @@ def _bind_to(self, url, bind): self.__engines[bind] = e self.context._engine = e else: - # TODO: this is squirrely. we shouldnt have to hold onto engines + # TODO: this is squirrely. 
we shouldn't have to hold onto engines # in a case like this if bind not in self.__engines: self.__engines[bind] = bind @@ -3381,6 +3785,3 @@ def dispose(self): for e in self.__engines.values(): if hasattr(e, 'dispose'): e.dispose() - - - diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index f64a70ec8d..b986fd5ae6 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -1,5 +1,6 @@ # sql/selectable.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -10,13 +11,14 @@ """ from .elements import ClauseElement, TextClause, ClauseList, \ - and_, Grouping, UnaryExpression, literal_column + and_, Grouping, UnaryExpression, literal_column, BindParameter from .elements import _clone, \ - _literal_as_text, _interpret_as_column_or_from, _expand_cloned,\ - _select_iterables, _anonymous_label, _clause_element_as_expr,\ - _cloned_intersection, _cloned_difference, True_, _only_column_elements + _literal_as_text, _interpret_as_column_or_from, _expand_cloned,\ + _select_iterables, _anonymous_label, _clause_element_as_expr,\ + _cloned_intersection, _cloned_difference, True_, \ + _literal_as_label_reference, _literal_and_labels_as_label_reference from .base import Immutable, Executable, _generative, \ - ColumnCollection, ColumnSet, _from_objects, Generative + ColumnCollection, ColumnSet, _from_objects, Generative from . import type_api from .. import inspection from .. import util @@ -27,24 +29,79 @@ import collections from .annotation import Annotated import itertools +from sqlalchemy.sql.visitors import Visitable + def _interpret_as_from(element): insp = inspection.inspect(element, raiseerr=False) if insp is None: if isinstance(element, util.string_types): + util.warn_limited( + "Textual SQL FROM expression %(expr)r should be " + "explicitly declared as text(%(expr)r), " + "or use table(%(expr)r) for more specificity", + {"expr": util.ellipses_string(element)}) + return TextClause(util.text_type(element)) - elif hasattr(insp, "selectable"): + try: return insp.selectable - raise exc.ArgumentError("FROM expression expected") + except AttributeError: + raise exc.ArgumentError("FROM expression expected") + def _interpret_as_select(element): element = _interpret_as_from(element) if isinstance(element, Alias): element = element.original - if not isinstance(element, Select): + if not isinstance(element, SelectBase): element = element.select() return element + +class _OffsetLimitParam(BindParameter): + @property + def _limit_offset_value(self): + return self.effective_value + + +def _offset_or_limit_clause(element, name=None, type_=None): + """Convert the given value to an "offset or limit" clause. + + This handles incoming integers and converts to an expression; if + an expression is already given, it is passed through. + + """ + if element is None: + return None + elif hasattr(element, '__clause_element__'): + return element.__clause_element__() + elif isinstance(element, Visitable): + return element + else: + value = util.asint(element) + return _OffsetLimitParam(name, value, type_=type_, unique=True) + + +def _offset_or_limit_clause_asint(clause, attrname): + """Convert the "offset or limit" clause of a select construct to an + integer. + + This is only possible if the value is stored as a simple bound parameter. 
+ Otherwise, a compilation error is raised. + + """ + if clause is None: + return None + try: + value = clause._limit_offset_value + except AttributeError: + raise exc.CompileError( + "This SELECT structure does not use a simple " + "integer value for %s" % attrname) + else: + return util.asint(value) + + def subquery(alias, *args, **kwargs): """Return an :class:`.Alias` object derived from a :class:`.Select`. @@ -61,7 +118,6 @@ def subquery(alias, *args, **kwargs): return Select(*args, **kwargs).alias(alias) - def alias(selectable, name=None, flat=False): """Return an :class:`.Alias` object. @@ -116,6 +172,79 @@ def selectable(self): return self +class HasPrefixes(object): + _prefixes = () + + @_generative + def prefix_with(self, *expr, **kw): + """Add one or more expressions following the statement keyword, i.e. + SELECT, INSERT, UPDATE, or DELETE. Generative. + + This is used to support backend-specific prefix keywords such as those + provided by MySQL. + + E.g.:: + + stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql") + + Multiple prefixes can be specified by multiple calls + to :meth:`.prefix_with`. + + :param \*expr: textual or :class:`.ClauseElement` construct which + will be rendered following the INSERT, UPDATE, or DELETE + keyword. + :param \**kw: A single keyword 'dialect' is accepted. This is an + optional string dialect name which will + limit rendering of this prefix to only that dialect. + + """ + dialect = kw.pop('dialect', None) + if kw: + raise exc.ArgumentError("Unsupported argument(s): %s" % + ",".join(kw)) + self._setup_prefixes(expr, dialect) + + def _setup_prefixes(self, prefixes, dialect=None): + self._prefixes = self._prefixes + tuple( + [(_literal_as_text(p, warn=False), dialect) for p in prefixes]) + + +class HasSuffixes(object): + _suffixes = () + + @_generative + def suffix_with(self, *expr, **kw): + """Add one or more expressions following the statement as a whole. + + This is used to support backend-specific suffix keywords on + certain constructs. + + E.g.:: + + stmt = select([col1, col2]).cte().suffix_with( + "cycle empno set y_cycle to 1 default 0", dialect="oracle") + + Multiple suffixes can be specified by multiple calls + to :meth:`.suffix_with`. + + :param \*expr: textual or :class:`.ClauseElement` construct which + will be rendered following the target clause. + :param \**kw: A single keyword 'dialect' is accepted. This is an + optional string dialect name which will + limit rendering of this suffix to only that dialect. + + """ + dialect = kw.pop('dialect', None) + if kw: + raise exc.ArgumentError("Unsupported argument(s): %s" % + ",".join(kw)) + self._setup_suffixes(expr, dialect) + + def _setup_suffixes(self, suffixes, dialect=None): + self._suffixes = self._suffixes + tuple( + [(_literal_as_text(p, warn=False), dialect) for p in suffixes]) + + class FromClause(Selectable): """Represent an element that can be used within the ``FROM`` clause of a ``SELECT`` statement. @@ -149,8 +278,9 @@ class FromClause(Selectable): schema = None """Define the 'schema' attribute for this :class:`.FromClause`. - This is typically ``None`` for most objects except that of :class:`.Table`, - where it is taken as the value of the :paramref:`.Table.schema` argument. + This is typically ``None`` for most objects except that of + :class:`.Table`, where it is taken as the value of the + :paramref:`.Table.schema` argument. 
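A quick sketch of the two mixins above in use; the prefix keyword is a real MySQL hint, while the suffix string is only an illustrative Oracle-style CYCLE clause and is not meant as authoritative syntax::

    from sqlalchemy import column, select, table

    t = table('documents', column('id'), column('parent_id'))

    # prefix_with() renders immediately after the SELECT keyword,
    # restricted here to the mysql dialect
    stmt = t.select().prefix_with('SQL_NO_CACHE', dialect='mysql')

    # suffix_with(), added by HasSuffixes, renders after the statement as
    # a whole; here it is attached to a CTE as in the docstring example
    doc_tree = select([t.c.id, t.c.parent_id]).cte('doc_tree').suffix_with(
        "cycle id set looped to '1' default '0'", dialect='oracle')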
""" @@ -166,10 +296,10 @@ def count(self, functions, whereclause=None, **params): else: col = list(self.columns)[0] return Select( - [functions.func.count(col).label('tbl_row_count')], - whereclause, - from_obj=[self], - **params) + [functions.func.count(col).label('tbl_row_count')], + whereclause, + from_obj=[self], + **params) def select(self, whereclause=None, **params): """return a SELECT of this :class:`.FromClause`. @@ -200,9 +330,10 @@ def join(self, right, onclause=None, isouter=False): SELECT user.id, user.name FROM user JOIN address ON user.id = address.user_id - :param right: the right side of the join; this is any :class:`.FromClause` - object such as a :class:`.Table` object, and may also be a selectable-compatible - object such as an ORM-mapped class. + :param right: the right side of the join; this is any + :class:`.FromClause` object such as a :class:`.Table` object, and + may also be a selectable-compatible object such as an ORM-mapped + class. :param onclause: a SQL expression representing the ON clause of the join. If left at ``None``, :meth:`.FromClause.join` will attempt to @@ -234,12 +365,15 @@ def outerjoin(self, right, onclause=None): The above is equivalent to:: - j = user_table.join(address_table, - user_table.c.id == address_table.c.user_id, isouter=True) + j = user_table.join( + address_table, + user_table.c.id == address_table.c.user_id, + isouter=True) - :param right: the right side of the join; this is any :class:`.FromClause` - object such as a :class:`.Table` object, and may also be a selectable-compatible - object such as an ORM-mapped class. + :param right: the right side of the join; this is any + :class:`.FromClause` object such as a :class:`.Table` object, and + may also be a selectable-compatible object such as an ORM-mapped + class. :param onclause: a SQL expression representing the ON clause of the join. If left at ``None``, :meth:`.FromClause.join` will attempt to @@ -323,8 +457,8 @@ def corresponding_column(self, column, require_embedded=False): :param column: the target :class:`.ColumnElement` to be matched :param require_embedded: only return corresponding columns for - the given :class:`.ColumnElement`, if the given :class:`.ColumnElement` - is actually present within a sub-element + the given :class:`.ColumnElement`, if the given + :class:`.ColumnElement` is actually present within a sub-element of this :class:`.FromClause`. Normally the column will match if it merely shares a common ancestor with one of the exported columns of this :class:`.FromClause`. 
@@ -334,7 +468,7 @@ def corresponding_column(self, column, require_embedded=False): def embedded(expanded_proxy_set, target_set): for t in target_set.difference(expanded_proxy_set): if not set(_expand_cloned([t]) - ).intersection(expanded_proxy_set): + ).intersection(expanded_proxy_set): return False return True @@ -374,12 +508,14 @@ def embedded(expanded_proxy_set, target_set): # columns that have no reference to the target # column (also occurs with CompoundSelect) - col_distance = util.reduce(operator.add, - [sc._annotations.get('weight', 1) for sc in - col.proxy_set if sc.shares_lineage(column)]) - c_distance = util.reduce(operator.add, - [sc._annotations.get('weight', 1) for sc in - c.proxy_set if sc.shares_lineage(column)]) + col_distance = util.reduce( + operator.add, + [sc._annotations.get('weight', 1) for sc in + col.proxy_set if sc.shares_lineage(column)]) + c_distance = util.reduce( + operator.add, + [sc._annotations.get('weight', 1) for sc in + c.proxy_set if sc.shares_lineage(column)]) if c_distance < col_distance: col, intersect = c, i return col @@ -435,7 +571,7 @@ def foreign_keys(self): return self.foreign_keys c = property(attrgetter('columns'), - doc="An alias for the :attr:`.columns` attribute.") + doc="An alias for the :attr:`.columns` attribute.") _select_iterable = property(attrgetter('columns')) def _init_collections(self): @@ -468,7 +604,7 @@ def _refresh_for_new_column(self, column): to its .c. collection when a Column has been added to one of the Table objects it ultimtely derives from. - If the given selectable hasn't populated it's .c. collection yet, + If the given selectable hasn't populated its .c. collection yet, it should at least pass on the message to the contained selectables, but it will return None. @@ -486,7 +622,8 @@ def _refresh_for_new_column(self, column): """ if not self._cols_populated: return None - elif column.key in self.columns and self.columns[column.key] is column: + elif (column.key in self.columns and + self.columns[column.key] is column): return column else: return None @@ -554,7 +691,6 @@ def _create_outerjoin(cls, left, right, onclause=None): """ return cls(left, right, onclause, isouter=True) - @classmethod def _create_join(cls, left, right, onclause=None, isouter=False): """Produce a :class:`.Join` object, given two :class:`.FromClause` @@ -562,7 +698,8 @@ def _create_join(cls, left, right, onclause=None, isouter=False): E.g.:: - j = join(user_table, address_table, user_table.c.id == address_table.c.user_id) + j = join(user_table, address_table, + user_table.c.id == address_table.c.user_id) stmt = select([user_table]).select_from(j) would emit SQL along the lines of:: @@ -570,15 +707,16 @@ def _create_join(cls, left, right, onclause=None, isouter=False): SELECT user.id, user.name FROM user JOIN address ON user.id = address.user_id - Similar functionality is available given any :class:`.FromClause` object - (e.g. such as a :class:`.Table`) using the :meth:`.FromClause.join` - method. + Similar functionality is available given any + :class:`.FromClause` object (e.g. such as a :class:`.Table`) using + the :meth:`.FromClause.join` method. :param left: The left side of the join. - :param right: the right side of the join; this is any :class:`.FromClause` - object such as a :class:`.Table` object, and may also be a selectable-compatible - object such as an ORM-mapped class. 
+ :param right: the right side of the join; this is any + :class:`.FromClause` object such as a :class:`.Table` object, and + may also be a selectable-compatible object such as an ORM-mapped + class. :param onclause: a SQL expression representing the ON clause of the join. If left at ``None``, :meth:`.FromClause.join` will attempt to @@ -596,7 +734,6 @@ def _create_join(cls, left, right, onclause=None, isouter=False): return cls(left, right, onclause, isouter) - @property def description(self): return "Join object on %s(%d) and %s(%d)" % ( @@ -607,8 +744,8 @@ def description(self): def is_derived_from(self, fromclause): return fromclause is self or \ - self.left.is_derived_from(fromclause) or \ - self.right.is_derived_from(fromclause) + self.left.is_derived_from(fromclause) or \ + self.right.is_derived_from(fromclause) def self_group(self, against=None): return FromGrouping(self) @@ -616,13 +753,13 @@ def self_group(self, against=None): @util.dependencies("sqlalchemy.sql.util") def _populate_column_collection(self, sqlutil): columns = [c for c in self.left.columns] + \ - [c for c in self.right.columns] + [c for c in self.right.columns] self.primary_key.extend(sqlutil.reduce_columns( - (c for c in columns if c.primary_key), self.onclause)) + (c for c in columns if c.primary_key), self.onclause)) self._columns.update((col._label, col) for col in columns) self.foreign_keys.update(itertools.chain( - *[col.foreign_keys for col in columns])) + *[col.foreign_keys for col in columns])) def _refresh_for_new_column(self, column): col = self.left._refresh_for_new_column(column) @@ -683,16 +820,43 @@ def _join_condition(cls, a, b, ignore_nonexistent_tables=False, providing a "natural join". """ + constraints = cls._joincond_scan_left_right( + a, a_subset, b, consider_as_foreign_keys) + + if len(constraints) > 1: + cls._joincond_trim_constraints( + a, b, constraints, consider_as_foreign_keys) + + if len(constraints) == 0: + if isinstance(b, FromGrouping): + hint = " Perhaps you meant to convert the right side to a "\ + "subquery using alias()?" + else: + hint = "" + raise exc.NoForeignKeysError( + "Can't find any foreign key relationships " + "between '%s' and '%s'.%s" % + (a.description, b.description, hint)) + + crit = [(x == y) for x, y in list(constraints.values())[0]] + if len(crit) == 1: + return (crit[0]) + else: + return and_(*crit) + + @classmethod + def _joincond_scan_left_right( + cls, a, a_subset, b, consider_as_foreign_keys): constraints = collections.defaultdict(list) for left in (a_subset, a): if left is None: continue for fk in sorted( - b.foreign_keys, - key=lambda fk: fk.parent._creation_order): + b.foreign_keys, + key=lambda fk: fk.parent._creation_order): if consider_as_foreign_keys is not None and \ - fk.parent not in consider_as_foreign_keys: + fk.parent not in consider_as_foreign_keys: continue try: col = fk.get_referent(left) @@ -706,10 +870,10 @@ def _join_condition(cls, a, b, ignore_nonexistent_tables=False, constraints[fk.constraint].append((col, fk.parent)) if left is not b: for fk in sorted( - left.foreign_keys, - key=lambda fk: fk.parent._creation_order): + left.foreign_keys, + key=lambda fk: fk.parent._creation_order): if consider_as_foreign_keys is not None and \ - fk.parent not in consider_as_foreign_keys: + fk.parent not in consider_as_foreign_keys: continue try: col = fk.get_referent(b) @@ -717,56 +881,41 @@ def _join_condition(cls, a, b, ignore_nonexistent_tables=False, if nrte.table_name == b.name: raise else: - # this is totally covered. 
can't get - # coverage to mark it. continue if col is not None: constraints[fk.constraint].append((col, fk.parent)) if constraints: break + return constraints + @classmethod + def _joincond_trim_constraints( + cls, a, b, constraints, consider_as_foreign_keys): + # more than one constraint matched. narrow down the list + # to include just those FKCs that match exactly to + # "consider_as_foreign_keys". + if consider_as_foreign_keys: + for const in list(constraints): + if set(f.parent for f in const.elements) != set( + consider_as_foreign_keys): + del constraints[const] + + # if still multiple constraints, but + # they all refer to the exact same end result, use it. if len(constraints) > 1: - # more than one constraint matched. narrow down the list - # to include just those FKCs that match exactly to - # "consider_as_foreign_keys". - if consider_as_foreign_keys: - for const in list(constraints): - if set(f.parent for f in const.elements) != set(consider_as_foreign_keys): - del constraints[const] - - # if still multiple constraints, but - # they all refer to the exact same end result, use it. - if len(constraints) > 1: - dedupe = set(tuple(crit) for crit in constraints.values()) - if len(dedupe) == 1: - key = list(constraints)[0] - constraints = {key: constraints[key]} - - if len(constraints) != 1: - raise exc.AmbiguousForeignKeysError( - "Can't determine join between '%s' and '%s'; " - "tables have more than one foreign key " - "constraint relationship between them. " - "Please specify the 'onclause' of this " - "join explicitly." % (a.description, b.description)) - - if len(constraints) == 0: - if isinstance(b, FromGrouping): - hint = " Perhaps you meant to convert the right side to a "\ - "subquery using alias()?" - else: - hint = "" - raise exc.NoForeignKeysError( - "Can't find any foreign key relationships " - "between '%s' and '%s'.%s" % (a.description, b.description, hint)) - - crit = [(x == y) for x, y in list(constraints.values())[0]] - if len(crit) == 1: - return (crit[0]) - else: - return and_(*crit) - + dedupe = set(tuple(crit) for crit in constraints.values()) + if len(dedupe) == 1: + key = list(constraints)[0] + constraints = {key: constraints[key]} + + if len(constraints) != 1: + raise exc.AmbiguousForeignKeysError( + "Can't determine join between '%s' and '%s'; " + "tables have more than one foreign key " + "constraint relationship between them. " + "Please specify the 'onclause' of this " + "join explicitly." % (a.description, b.description)) def select(self, whereclause=None, **kwargs): """Create a :class:`.Select` from this :class:`.Join`. @@ -799,7 +948,7 @@ def alias(self, sqlutil, name=None, flat=False): """return an alias of this :class:`.Join`. The default behavior here is to first produce a SELECT - construct from this :class:`.Join`, then to produce a + construct from this :class:`.Join`, then to produce an :class:`.Alias` from that. 
So given a join of the form:: j = table_a.join(table_b, table_a.c.id == table_b.c.a_id) @@ -832,8 +981,8 @@ def alias(self, sqlutil, name=None, flat=False): columns as that of the two individual selectables presented under a single name - the individual columns are "auto-labeled", meaning the ``.c.`` collection of the resulting :class:`.Alias` represents - the names of the individual columns using a ``_`` - scheme:: + the names of the individual columns using a + ``_`` scheme:: j.c.table_a_id j.c.table_b_a_id @@ -896,26 +1045,26 @@ def alias(self, sqlutil, name=None, flat=False): if flat: assert name is None, "Can't send name argument with flat" left_a, right_a = self.left.alias(flat=True), \ - self.right.alias(flat=True) + self.right.alias(flat=True) adapter = sqlutil.ClauseAdapter(left_a).\ - chain(sqlutil.ClauseAdapter(right_a)) + chain(sqlutil.ClauseAdapter(right_a)) - return left_a.join(right_a, - adapter.traverse(self.onclause), isouter=self.isouter) + return left_a.join(right_a, adapter.traverse(self.onclause), + isouter=self.isouter) else: return self.select(use_labels=True, correlate=False).alias(name) @property def _hide_froms(self): return itertools.chain(*[_from_objects(x.left, x.right) - for x in self._cloned_set]) + for x in self._cloned_set]) @property def _from_objects(self): return [self] + \ - self.onclause._from_objects + \ - self.left._from_objects + \ - self.right._from_objects + self.onclause._from_objects + \ + self.left._from_objects + \ + self.right._from_objects class Alias(FromClause): @@ -925,9 +1074,9 @@ class Alias(FromClause): sub-select within a SQL statement using the ``AS`` keyword (or without the keyword on certain databases such as Oracle). - This object is constructed from the :func:`~.expression.alias` module level - function as well as the :meth:`.FromClause.alias` method available on all - :class:`.FromClause` subclasses. + This object is constructed from the :func:`~.expression.alias` module + level function as well as the :meth:`.FromClause.alias` method available + on all :class:`.FromClause` subclasses. """ @@ -949,10 +1098,9 @@ def __init__(self, selectable, name=None): if self.original.named_with_column: name = getattr(self.original, 'name', None) name = _anonymous_label('%%(%d %s)s' % (id(self), name - or 'anon')) + or 'anon')) self.name = name - @property def description(self): if util.py3k: @@ -1014,7 +1162,7 @@ def bind(self): return self.element.bind -class CTE(Alias): +class CTE(Generative, HasSuffixes, Alias): """Represent a Common Table Expression. 
The :class:`.CTE` object is obtained using the @@ -1027,13 +1175,16 @@ class CTE(Alias): __visit_name__ = 'cte' def __init__(self, selectable, - name=None, - recursive=False, - _cte_alias=None, - _restates=frozenset()): + name=None, + recursive=False, + _cte_alias=None, + _restates=frozenset(), + _suffixes=None): self.recursive = recursive self._cte_alias = _cte_alias self._restates = _restates + if _suffixes: + self._suffixes = _suffixes super(CTE, self).__init__(selectable, name=name) def alias(self, name=None, flat=False): @@ -1042,14 +1193,16 @@ def alias(self, name=None, flat=False): name=name, recursive=self.recursive, _cte_alias=self, - ) + _suffixes=self._suffixes + ) def union(self, other): return CTE( self.original.union(other), name=self.name, recursive=self.recursive, - _restates=self._restates.union([self]) + _restates=self._restates.union([self]), + _suffixes=self._suffixes ) def union_all(self, other): @@ -1057,12 +1210,11 @@ def union_all(self, other): self.original.union_all(other), name=self.name, recursive=self.recursive, - _restates=self._restates.union([self]) + _restates=self._restates.union([self]), + _suffixes=self._suffixes ) - - class FromGrouping(FromClause): """Represent a grouping of a FROM clause""" __visit_name__ = 'grouping' @@ -1114,6 +1266,7 @@ def __getstate__(self): def __setstate__(self, state): self.element = state['element'] + class TableClause(Immutable, FromClause): """Represents a minimal "table" construct. @@ -1121,7 +1274,7 @@ class TableClause(Immutable, FromClause): collection of columns, which are typically produced by the :func:`.expression.column` function:: - from sqlalchemy.sql import table, column + from sqlalchemy import table, column user = table("user", column("id"), @@ -1162,11 +1315,9 @@ def __init__(self, name, *columns): :class:`~.schema.Table` object. It may be used to construct lightweight table constructs. - Note that the :func:`.expression.table` function is not part of - the ``sqlalchemy`` namespace. It must be imported from the - ``sql`` package:: - - from sqlalchemy.sql import table, column + .. versionchanged:: 1.0.0 :func:`.expression.table` can now + be imported from the plain ``sqlalchemy`` namespace like any + other SQL element. :param name: Name of the table. @@ -1212,10 +1363,10 @@ def count(self, functions, whereclause=None, **params): else: col = list(self.columns)[0] return Select( - [functions.func.count(col).label('tbl_row_count')], - whereclause, - from_obj=[self], - **params) + [functions.func.count(col).label('tbl_row_count')], + whereclause, + from_obj=[self], + **params) @util.dependencies("sqlalchemy.sql.dml") def insert(self, dml, values=None, inline=False, **kwargs): @@ -1233,7 +1384,8 @@ def insert(self, dml, values=None, inline=False, **kwargs): return dml.Insert(self, values=values, inline=inline, **kwargs) @util.dependencies("sqlalchemy.sql.dml") - def update(self, dml, whereclause=None, values=None, inline=False, **kwargs): + def update( + self, dml, whereclause=None, values=None, inline=False, **kwargs): """Generate an :func:`.update` construct against this :class:`.TableClause`. 
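Reflecting the versionchanged note above, ``table()`` and ``column()`` are importable straight from the top-level ``sqlalchemy`` namespace; the names used here are illustrative::

    from sqlalchemy import column, select, table

    user = table('user', column('id'), column('name'))
    stmt = select([user.c.id]).where(user.c.name == 'sandy')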
@@ -1246,7 +1398,7 @@ def update(self, dml, whereclause=None, values=None, inline=False, **kwargs): """ return dml.Update(self, whereclause=whereclause, - values=values, inline=inline, **kwargs) + values=values, inline=inline, **kwargs) @util.dependencies("sqlalchemy.sql.dml") def delete(self, dml, whereclause=None, **kwargs): @@ -1332,7 +1484,7 @@ def __init__(self, nowait=False, read=False, of=None): self.read = read if of is not None: self.of = [_interpret_as_column_or_from(elem) - for elem in util.to_list(of)] + for elem in util.to_list(of)] else: self.of = None @@ -1360,7 +1512,6 @@ def as_scalar(self): """ return ScalarSelect(self) - def label(self, name): """return a 'scalar' representation of this selectable, embedded as a subquery with a label. @@ -1404,8 +1555,8 @@ def cte(self, name=None, recursive=False): Example 1, non recursive:: - from sqlalchemy import Table, Column, String, Integer, MetaData, \\ - select, func + from sqlalchemy import (Table, Column, String, Integer, + MetaData, select, func) metadata = MetaData() @@ -1443,8 +1594,8 @@ def cte(self, name=None, recursive=False): Example 2, WITH RECURSIVE:: - from sqlalchemy import Table, Column, String, Integer, MetaData, \\ - select, func + from sqlalchemy import (Table, Column, String, Integer, + MetaData, select, func) metadata = MetaData() @@ -1466,8 +1617,8 @@ def cte(self, name=None, recursive=False): parts_alias = parts.alias() included_parts = included_parts.union_all( select([ - parts_alias.c.part, parts_alias.c.sub_part, + parts_alias.c.part, parts_alias.c.quantity ]). where(parts_alias.c.part==incl_alias.c.sub_part) @@ -1477,9 +1628,7 @@ def cte(self, name=None, recursive=False): included_parts.c.sub_part, func.sum(included_parts.c.quantity). label('total_quantity') - ]).\ - select_from(included_parts.join(parts, - included_parts.c.part==parts.c.part)).\\ + ]).\\ group_by(included_parts.c.sub_part) result = conn.execute(statement).fetchall() @@ -1487,7 +1636,8 @@ def cte(self, name=None, recursive=False): .. seealso:: - :meth:`.orm.query.Query.cte` - ORM version of :meth:`.SelectBase.cte`. + :meth:`.orm.query.Query.cte` - ORM version of + :meth:`.SelectBase.cte`. """ return CTE(self, name=name, recursive=recursive) @@ -1518,42 +1668,44 @@ def _generate(self): def _from_objects(self): return [self] + class GenerativeSelect(SelectBase): """Base class for SELECT statements where additional elements can be added. This serves as the base for :class:`.Select` and :class:`.CompoundSelect` - where elements such as ORDER BY, GROUP BY can be added and column rendering - can be controlled. Compare to :class:`.TextAsFrom`, which, while it - subclasses :class:`.SelectBase` and is also a SELECT construct, represents - a fixed textual string which cannot be altered at this level, only - wrapped as a subquery. + where elements such as ORDER BY, GROUP BY can be added and column + rendering can be controlled. Compare to :class:`.TextAsFrom`, which, + while it subclasses :class:`.SelectBase` and is also a SELECT construct, + represents a fixed textual string which cannot be altered at this level, + only wrapped as a subquery. .. versionadded:: 0.9.0 :class:`.GenerativeSelect` was added to - provide functionality specific to :class:`.Select` and :class:`.CompoundSelect` - while allowing :class:`.SelectBase` to be used for other SELECT-like - objects, e.g. :class:`.TextAsFrom`. 
+ provide functionality specific to :class:`.Select` and + :class:`.CompoundSelect` while allowing :class:`.SelectBase` to be + used for other SELECT-like objects, e.g. :class:`.TextAsFrom`. """ _order_by_clause = ClauseList() _group_by_clause = ClauseList() - _limit = None - _offset = None + _limit_clause = None + _offset_clause = None _for_update_arg = None def __init__(self, - use_labels=False, - for_update=False, - limit=None, - offset=None, - order_by=None, - group_by=None, - bind=None, - autocommit=None): + use_labels=False, + for_update=False, + limit=None, + offset=None, + order_by=None, + group_by=None, + bind=None, + autocommit=None): self.use_labels = use_labels if for_update is not False: - self._for_update_arg = ForUpdateArg.parse_legacy_select(for_update) + self._for_update_arg = (ForUpdateArg. + parse_legacy_select(for_update)) if autocommit is not None: util.warn_deprecated('autocommit on select() is ' @@ -1561,17 +1713,21 @@ def __init__(self, 'utocommit=True)') self._execution_options = \ self._execution_options.union( - {'autocommit': autocommit}) + {'autocommit': autocommit}) if limit is not None: - self._limit = util.asint(limit) + self._limit_clause = _offset_or_limit_clause(limit) if offset is not None: - self._offset = util.asint(offset) + self._offset_clause = _offset_or_limit_clause(offset) self._bind = bind if order_by is not None: - self._order_by_clause = ClauseList(*util.to_list(order_by)) + self._order_by_clause = ClauseList( + *util.to_list(order_by), + _literal_as_text=_literal_and_labels_as_label_reference) if group_by is not None: - self._group_by_clause = ClauseList(*util.to_list(group_by)) + self._group_by_clause = ClauseList( + *util.to_list(group_by), + _literal_as_text=_literal_as_label_reference) @property def for_update(self): @@ -1609,8 +1765,8 @@ def with_for_update(self, nowait=False, read=False, of=None): provided which allow for common database-specific variants. - :param nowait: boolean; will render ``FOR UPDATE NOWAIT`` on Oracle and - Postgresql dialects. + :param nowait: boolean; will render ``FOR UPDATE NOWAIT`` on Oracle + and Postgresql dialects. :param read: boolean; will render ``LOCK IN SHARE MODE`` on MySQL, ``FOR SHARE`` on Postgresql. On Postgresql, when combined with @@ -1640,19 +1796,81 @@ def apply_labels(self): """ self.use_labels = True + @property + def _limit(self): + """Get an integer value for the limit. This should only be used + by code that cannot support a limit as a BindParameter or + other custom clause as it will throw an exception if the limit + isn't currently set to an integer. + + """ + return _offset_or_limit_clause_asint(self._limit_clause, "limit") + + @property + def _simple_int_limit(self): + """True if the LIMIT clause is a simple integer, False + if it is not present or is a SQL expression. + """ + return isinstance(self._limit_clause, _OffsetLimitParam) + + @property + def _simple_int_offset(self): + """True if the OFFSET clause is a simple integer, False + if it is not present or is a SQL expression. + """ + return isinstance(self._offset_clause, _OffsetLimitParam) + + @property + def _offset(self): + """Get an integer value for the offset. This should only be used + by code that cannot support an offset as a BindParameter or + other custom clause as it will throw an exception if the + offset isn't currently set to an integer. 
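A short usage sketch for ``with_for_update()``, which supersedes the legacy string values of the ``for_update`` flag handled by ``ForUpdateArg.parse_legacy_select()``; the table is invented and the exact rendering is dialect-dependent::

    from sqlalchemy import column, select, table

    t = table('t', column('x'))

    # renders e.g. "FOR UPDATE OF x NOWAIT" on backends that support it
    stmt = select([t.c.x]).with_for_update(nowait=True, of=t.c.x)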
+ + """ + return _offset_or_limit_clause_asint(self._offset_clause, "offset") + @_generative def limit(self, limit): """return a new selectable with the given LIMIT criterion - applied.""" + applied. + + This is a numerical value which usually renders as a ``LIMIT`` + expression in the resulting select. Backends that don't + support ``LIMIT`` will attempt to provide similar + functionality. - self._limit = util.asint(limit) + .. versionchanged:: 1.0.0 - :meth:`.Select.limit` can now + accept arbitrary SQL expressions as well as integer values. + + :param limit: an integer LIMIT parameter, or a SQL expression + that provides an integer result. + + """ + + self._limit_clause = _offset_or_limit_clause(limit) @_generative def offset(self, offset): """return a new selectable with the given OFFSET criterion - applied.""" + applied. + + + This is a numeric value which usually renders as an ``OFFSET`` + expression in the resulting select. Backends that don't + support ``OFFSET`` will attempt to provide similar + functionality. + + + .. versionchanged:: 1.0.0 - :meth:`.Select.offset` can now + accept arbitrary SQL expressions as well as integer values. - self._offset = util.asint(offset) + :param offset: an integer OFFSET parameter, or a SQL expression + that provides an integer result. + + """ + + self._offset_clause = _offset_or_limit_clause(offset) @_generative def order_by(self, *clauses): @@ -1684,8 +1902,8 @@ def append_order_by(self, *clauses): The criterion will be appended to any pre-existing ORDER BY criterion. This is an **in-place** mutation method; the - :meth:`~.GenerativeSelect.order_by` method is preferred, as it provides standard - :term:`method chaining`. + :meth:`~.GenerativeSelect.order_by` method is preferred, as it + provides standard :term:`method chaining`. """ if len(clauses) == 1 and clauses[0] is None: @@ -1693,7 +1911,9 @@ def append_order_by(self, *clauses): else: if getattr(self, '_order_by_clause', None) is not None: clauses = list(self._order_by_clause) + list(clauses) - self._order_by_clause = ClauseList(*clauses) + self._order_by_clause = ClauseList( + *clauses, + _literal_as_text=_literal_and_labels_as_label_reference) def append_group_by(self, *clauses): """Append the given GROUP BY criterion applied to this selectable. @@ -1701,8 +1921,8 @@ def append_group_by(self, *clauses): The criterion will be appended to any pre-existing GROUP BY criterion. This is an **in-place** mutation method; the - :meth:`~.GenerativeSelect.group_by` method is preferred, as it provides standard - :term:`method chaining`. + :meth:`~.GenerativeSelect.group_by` method is preferred, as it + provides standard :term:`method chaining`. 
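Because the reworked ``limit()`` / ``offset()`` above now store full clause objects, as of 1.0.0 a bound parameter (or any SQL expression yielding an integer) is accepted in addition to plain ints. A short sketch with a hypothetical ``users`` table::

    from sqlalchemy import (Table, Column, Integer, String, MetaData,
                            select, bindparam)

    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(50)))

    # plain integers, as before
    stmt = select([users]).order_by(users.c.id).limit(10).offset(20)

    # LIMIT/OFFSET supplied at execution time via bound parameters
    paged = select([users]).order_by(users.c.id).\
        limit(bindparam('page_size')).offset(bindparam('page_start'))

    print(paged)
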
""" if len(clauses) == 1 and clauses[0] is None: @@ -1710,7 +1930,18 @@ def append_group_by(self, *clauses): else: if getattr(self, '_group_by_clause', None) is not None: clauses = list(self._group_by_clause) + list(clauses) - self._group_by_clause = ClauseList(*clauses) + self._group_by_clause = ClauseList( + *clauses, _literal_as_text=_literal_as_label_reference) + + @property + def _label_resolve_dict(self): + raise NotImplementedError() + + def _copy_internals(self, clone=_clone, **kw): + if self._limit_clause is not None: + self._limit_clause = clone(self._limit_clause, **kw) + if self._offset_clause is not None: + self._offset_clause = clone(self._offset_clause, **kw) class CompoundSelect(GenerativeSelect): @@ -1759,16 +1990,26 @@ def __init__(self, keyword, *selects, **kwargs): if not numcols: numcols = len(s.c._all_columns) elif len(s.c._all_columns) != numcols: - raise exc.ArgumentError('All selectables passed to ' - 'CompoundSelect must have identical numbers of ' - 'columns; select #%d has %d columns, select ' - '#%d has %d' % (1, len(self.selects[0].c._all_columns), n - + 1, len(s.c._all_columns))) + raise exc.ArgumentError( + 'All selectables passed to ' + 'CompoundSelect must have identical numbers of ' + 'columns; select #%d has %d columns, select ' + '#%d has %d' % + (1, len(self.selects[0].c._all_columns), + n + 1, len(s.c._all_columns)) + ) self.selects.append(s.self_group(self)) GenerativeSelect.__init__(self, **kwargs) + @property + def _label_resolve_dict(self): + d = dict( + (c.key, c) for c in self.c + ) + return d, d + @classmethod def _create_union(cls, *selects, **kwargs): """Return a ``UNION`` of multiple selectables. @@ -1809,7 +2050,6 @@ def _create_union_all(cls, *selects, **kwargs): """ return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs) - @classmethod def _create_except(cls, *selects, **kwargs): """Return an ``EXCEPT`` of multiple selectables. @@ -1827,7 +2067,6 @@ def _create_except(cls, *selects, **kwargs): """ return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs) - @classmethod def _create_except_all(cls, *selects, **kwargs): """Return an ``EXCEPT ALL`` of multiple selectables. @@ -1845,7 +2084,6 @@ def _create_except_all(cls, *selects, **kwargs): """ return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs) - @classmethod def _create_intersect(cls, *selects, **kwargs): """Return an ``INTERSECT`` of multiple selectables. @@ -1863,7 +2101,6 @@ def _create_intersect(cls, *selects, **kwargs): """ return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs) - @classmethod def _create_intersect_all(cls, *selects, **kwargs): """Return an ``INTERSECT ALL`` of multiple selectables. @@ -1879,8 +2116,8 @@ def _create_intersect_all(cls, *selects, **kwargs): :func:`select`. """ - return CompoundSelect(CompoundSelect.INTERSECT_ALL, *selects, **kwargs) - + return CompoundSelect( + CompoundSelect.INTERSECT_ALL, *selects, **kwargs) def _scalar_type(self): return self.selects[0]._scalar_type() @@ -1907,9 +2144,9 @@ def _populate_column_collection(self): # ForeignKeys in. this would allow the union() to have all # those fks too. 
- proxy = cols[0]._make_proxy(self, - name=cols[0]._label if self.use_labels else None, - key=cols[0]._key_label if self.use_labels else None) + proxy = cols[0]._make_proxy( + self, name=cols[0]._label if self.use_labels else None, + key=cols[0]._key_label if self.use_labels else None) # hand-construct the "_proxies" collection to include all # derived columns place a 'weight' annotation corresponding @@ -1917,8 +2154,8 @@ def _populate_column_collection(self): # that the corresponding_column() operation can resolve # conflicts - proxy._proxies = [c._annotate({'weight': i + 1}) for (i, - c) in enumerate(cols)] + proxy._proxies = [ + c._annotate({'weight': i + 1}) for (i, c) in enumerate(cols)] def _refresh_for_new_column(self, column): for s in self.selects: @@ -1928,14 +2165,17 @@ def _refresh_for_new_column(self, column): return None raise NotImplementedError("CompoundSelect constructs don't support " - "addition of columns to underlying selectables") + "addition of columns to underlying " + "selectables") def _copy_internals(self, clone=_clone, **kw): + super(CompoundSelect, self)._copy_internals(clone, **kw) self._reset_exported() self.selects = [clone(s, **kw) for s in self.selects] if hasattr(self, '_col_map'): del self._col_map - for attr in ('_order_by_clause', '_group_by_clause', '_for_update_arg'): + for attr in ( + '_order_by_clause', '_group_by_clause', '_for_update_arg'): if getattr(self, attr) is not None: setattr(self, attr, clone(getattr(self, attr), **kw)) @@ -1959,45 +2199,7 @@ def _set_bind(self, bind): bind = property(bind, _set_bind) -class HasPrefixes(object): - _prefixes = () - - @_generative - def prefix_with(self, *expr, **kw): - """Add one or more expressions following the statement keyword, i.e. - SELECT, INSERT, UPDATE, or DELETE. Generative. - - This is used to support backend-specific prefix keywords such as those - provided by MySQL. - - E.g.:: - - stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql") - - Multiple prefixes can be specified by multiple calls - to :meth:`.prefix_with`. - - :param \*expr: textual or :class:`.ClauseElement` construct which - will be rendered following the INSERT, UPDATE, or DELETE - keyword. - :param \**kw: A single keyword 'dialect' is accepted. This is an - optional string dialect name which will - limit rendering of this prefix to only that dialect. - - """ - dialect = kw.pop('dialect', None) - if kw: - raise exc.ArgumentError("Unsupported argument(s): %s" % - ",".join(kw)) - self._setup_prefixes(expr, dialect) - - def _setup_prefixes(self, prefixes, dialect=None): - self._prefixes = self._prefixes + tuple( - [(_literal_as_text(p), dialect) for p in prefixes]) - - - -class Select(HasPrefixes, GenerativeSelect): +class Select(HasPrefixes, HasSuffixes, GenerativeSelect): """Represents a ``SELECT`` statement. """ @@ -2005,7 +2207,9 @@ class Select(HasPrefixes, GenerativeSelect): __visit_name__ = 'select' _prefixes = () + _suffixes = () _hints = util.immutabledict() + _statement_hints = () _distinct = False _from_cloned = None _correlate = () @@ -2014,21 +2218,22 @@ class Select(HasPrefixes, GenerativeSelect): _is_select = True def __init__(self, - columns=None, - whereclause=None, - from_obj=None, - distinct=False, - having=None, - correlate=True, - prefixes=None, - **kwargs): + columns=None, + whereclause=None, + from_obj=None, + distinct=False, + having=None, + correlate=True, + prefixes=None, + suffixes=None, + **kwargs): """Construct a new :class:`.Select`. 
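The ``CompoundSelect`` error path reformatted a bit further up enforces that every SELECT contributes the same number of columns; when they do, the union can be ordered by a string name which the new ``_label_resolve_dict`` resolves against the compound's columns. A sketch, assuming hypothetical ``orders`` and ``orders_archive`` tables::

    from sqlalchemy import (Table, Column, Integer, MetaData, select,
                            union_all)

    metadata = MetaData()
    orders = Table('orders', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('total', Integer))
    archive = Table('orders_archive', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('total', Integer))

    # both SELECTs supply two columns; a mismatch raises ArgumentError
    stmt = union_all(
        select([orders.c.id, orders.c.total]),
        select([archive.c.id, archive.c.total]),
    ).order_by('id')

    print(stmt)
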
- Similar functionality is also available via the :meth:`.FromClause.select` - method on any :class:`.FromClause`. + Similar functionality is also available via the + :meth:`.FromClause.select` method on any :class:`.FromClause`. - All arguments which accept :class:`.ClauseElement` arguments also accept - string arguments, which will be converted as appropriate into + All arguments which accept :class:`.ClauseElement` arguments also + accept string arguments, which will be converted as appropriate into either :func:`text()` or :func:`literal_column()` constructs. .. seealso:: @@ -2037,54 +2242,75 @@ def __init__(self, :func:`.select`. :param columns: - A list of :class:`.ClauseElement` objects, typically - :class:`.ColumnElement` objects or subclasses, which will form the - columns clause of the resulting statement. For all members which are - instances of :class:`.Selectable`, the individual :class:`.ColumnElement` - members of the :class:`.Selectable` will be added individually to the - columns clause. For example, specifying a - :class:`~sqlalchemy.schema.Table` instance will result in all the - contained :class:`~sqlalchemy.schema.Column` objects within to be added - to the columns clause. - - This argument is not present on the form of :func:`select()` - available on :class:`~sqlalchemy.schema.Table`. + A list of :class:`.ColumnElement` or :class:`.FromClause` + objects which will form the columns clause of the resulting + statement. For those objects that are instances of + :class:`.FromClause` (typically :class:`.Table` or :class:`.Alias` + objects), the :attr:`.FromClause.c` collection is extracted + to form a collection of :class:`.ColumnElement` objects. + + This parameter will also accept :class:`.Text` constructs as + given, as well as ORM-mapped classes. + + .. note:: + + The :paramref:`.select.columns` parameter is not available + in the method form of :func:`.select`, e.g. + :meth:`.FromClause.select`. + + .. seealso:: + + :meth:`.Select.column` + + :meth:`.Select.with_only_columns` :param whereclause: A :class:`.ClauseElement` expression which will be used to form the - ``WHERE`` clause. + ``WHERE`` clause. It is typically preferable to add WHERE + criterion to an existing :class:`.Select` using method chaining + with :meth:`.Select.where`. + + .. seealso:: + + :meth:`.Select.where` :param from_obj: A list of :class:`.ClauseElement` objects which will be added to the - ``FROM`` clause of the resulting statement. Note that "from" objects are - automatically located within the columns and whereclause ClauseElements. - Use this parameter to explicitly specify "from" objects which are not - automatically locatable. This could include - :class:`~sqlalchemy.schema.Table` objects that aren't otherwise present, - or :class:`.Join` objects whose presence will supercede that of the - :class:`~sqlalchemy.schema.Table` objects already located in the other - clauses. + ``FROM`` clause of the resulting statement. This is equivalent + to calling :meth:`.Select.select_from` using method chaining on + an existing :class:`.Select` object. + + .. seealso:: + + :meth:`.Select.select_from` - full description of explicit + FROM clause specification. :param autocommit: - Deprecated. Use .execution_options(autocommit=) + Deprecated. Use ``.execution_options(autocommit=)`` to set the autocommit option. + .. seealso:: + + :meth:`.Executable.execution_options` + :param bind=None: an :class:`~.Engine` or :class:`~.Connection` instance to which the - resulting :class:`.Select` object will be bound. 
The :class:`.Select` - object will otherwise automatically bind to whatever - :class:`~.base.Connectable` instances can be located within its contained - :class:`.ClauseElement` members. + resulting :class:`.Select` object will be bound. The + :class:`.Select` object will otherwise automatically bind to + whatever :class:`~.base.Connectable` instances can be located within + its contained :class:`.ClauseElement` members. :param correlate=True: indicates that this :class:`.Select` object should have its contained :class:`.FromClause` elements "correlated" to an enclosing - :class:`.Select` object. This means that any :class:`.ClauseElement` - instance within the "froms" collection of this :class:`.Select` - which is also present in the "froms" collection of an - enclosing select will not be rendered in the ``FROM`` clause - of this select statement. + :class:`.Select` object. It is typically preferable to specify + correlations on an existing :class:`.Select` construct using + :meth:`.Select.correlate`. + + .. seealso:: + + :meth:`.Select.correlate` - full description of correlation. :param distinct=False: when ``True``, applies a ``DISTINCT`` qualifier to the columns @@ -2095,15 +2321,20 @@ def __init__(self, is understood by the Postgresql dialect to render the ``DISTINCT ON ()`` syntax. - ``distinct`` is also available via the :meth:`~.Select.distinct` - generative method. + ``distinct`` is also available on an existing :class:`.Select` + object via the :meth:`~.Select.distinct` method. + + .. seealso:: + + :meth:`.Select.distinct` :param for_update=False: when ``True``, applies ``FOR UPDATE`` to the end of the resulting statement. - .. deprecated:: 0.9.0 - use :meth:`.GenerativeSelect.with_for_update` - to specify the structure of the ``FOR UPDATE`` clause. + .. deprecated:: 0.9.0 - use + :meth:`.Select.with_for_update` to specify the + structure of the ``FOR UPDATE`` clause. ``for_update`` accepts various string values interpreted by specific backends, including: @@ -2117,32 +2348,62 @@ def __init__(self, .. seealso:: - :meth:`.GenerativeSelect.with_for_update` - improved API for + :meth:`.Select.with_for_update` - improved API for specifying the ``FOR UPDATE`` clause. :param group_by: a list of :class:`.ClauseElement` objects which will comprise the - ``GROUP BY`` clause of the resulting select. + ``GROUP BY`` clause of the resulting select. This parameter + is typically specified more naturally using the + :meth:`.Select.group_by` method on an existing :class:`.Select`. + + .. seealso:: + + :meth:`.Select.group_by` :param having: a :class:`.ClauseElement` that will comprise the ``HAVING`` clause - of the resulting select when ``GROUP BY`` is used. + of the resulting select when ``GROUP BY`` is used. This parameter + is typically specified more naturally using the + :meth:`.Select.having` method on an existing :class:`.Select`. + + .. seealso:: + + :meth:`.Select.having` :param limit=None: - a numerical value which usually compiles to a ``LIMIT`` - expression in the resulting select. Databases that don't + a numerical value which usually renders as a ``LIMIT`` + expression in the resulting select. Backends that don't support ``LIMIT`` will attempt to provide similar - functionality. + functionality. This parameter is typically specified more naturally + using the :meth:`.Select.limit` method on an existing + :class:`.Select`. + + .. seealso:: + + :meth:`.Select.limit` :param offset=None: - a numeric value which usually compiles to an ``OFFSET`` - expression in the resulting select. 
Databases that don't + a numeric value which usually renders as an ``OFFSET`` + expression in the resulting select. Backends that don't support ``OFFSET`` will attempt to provide similar - functionality. + functionality. This parameter is typically specified more naturally + using the :meth:`.Select.offset` method on an existing + :class:`.Select`. + + .. seealso:: + + :meth:`.Select.offset` :param order_by: a scalar or list of :class:`.ClauseElement` objects which will comprise the ``ORDER BY`` clause of the resulting select. + This parameter is typically specified more naturally using the + :meth:`.Select.order_by` method on an existing :class:`.Select`. + + .. seealso:: + + :meth:`.Select.order_by` :param use_labels=False: when ``True``, the statement will be generated using labels @@ -2153,8 +2414,13 @@ def __init__(self, collection of the resulting :class:`.Select` object will use these names as well for targeting column members. - use_labels is also available via the :meth:`~.GenerativeSelect.apply_labels` - generative method. + This parameter can also be specified on an existing + :class:`.Select` object using the :meth:`.Select.apply_labels` + method. + + .. seealso:: + + :meth:`.Select.apply_labels` """ self._auto_correlate = correlate @@ -2163,14 +2429,14 @@ def __init__(self, self._distinct = True else: self._distinct = [ - _literal_as_text(e) - for e in util.to_list(distinct) - ] + _literal_as_text(e) + for e in util.to_list(distinct) + ] if from_obj is not None: self._from_obj = util.OrderedSet( - _interpret_as_from(f) - for f in util.to_list(from_obj)) + _interpret_as_from(f) + for f in util.to_list(from_obj)) else: self._from_obj = util.OrderedSet() @@ -2178,7 +2444,7 @@ def __init__(self, cols_present = bool(columns) except TypeError: raise exc.ArgumentError("columns argument to select() must " - "be a Python list or other iterable") + "be a Python list or other iterable") if cols_present: self._raw_columns = [] @@ -2191,18 +2457,23 @@ def __init__(self, self._raw_columns = [] if whereclause is not None: - self._whereclause = _literal_as_text(whereclause) + self._whereclause = _literal_as_text( + whereclause).self_group(against=operators._asbool) else: self._whereclause = None if having is not None: - self._having = _literal_as_text(having) + self._having = _literal_as_text( + having).self_group(against=operators._asbool) else: self._having = None if prefixes: self._setup_prefixes(prefixes) + if suffixes: + self._setup_suffixes(suffixes) + GenerativeSelect.__init__(self, **kwargs) @property @@ -2215,26 +2486,25 @@ def _froms(self): seen = set() translate = self._from_cloned - def add(items): - for item in items: - if item is self: - raise exc.InvalidRequestError( - "select() construct refers to itself as a FROM") - if translate and item in translate: - item = translate[item] - if not seen.intersection(item._cloned_set): - froms.append(item) - seen.update(item._cloned_set) - - add(_from_objects(*self._raw_columns)) - if self._whereclause is not None: - add(_from_objects(self._whereclause)) - add(self._from_obj) + for item in itertools.chain( + _from_objects(*self._raw_columns), + _from_objects(self._whereclause) + if self._whereclause is not None else (), + self._from_obj + ): + if item is self: + raise exc.InvalidRequestError( + "select() construct refers to itself as a FROM") + if translate and item in translate: + item = translate[item] + if not seen.intersection(item._cloned_set): + froms.append(item) + seen.update(item._cloned_set) return froms def 
_get_display_froms(self, explicit_correlate_froms=None, - implicit_correlate_froms=None): + implicit_correlate_froms=None): """Return the full list of 'from' clauses to be displayed. Takes into account a set of existing froms which may be @@ -2246,8 +2516,8 @@ def _get_display_froms(self, explicit_correlate_froms=None, froms = self._froms toremove = set(itertools.chain(*[ - _expand_cloned(f._hide_froms) - for f in froms])) + _expand_cloned(f._hide_froms) + for f in froms])) if toremove: # if we're maintaining clones of froms, # add the copies out to the toremove list. only include @@ -2268,7 +2538,8 @@ def _get_display_froms(self, explicit_correlate_froms=None, froms = [ f for f in froms if f not in _cloned_intersection( - _cloned_intersection(froms, explicit_correlate_froms or ()), + _cloned_intersection( + froms, explicit_correlate_froms or ()), to_correlate ) ] @@ -2278,14 +2549,15 @@ def _get_display_froms(self, explicit_correlate_froms=None, froms = [ f for f in froms if f not in _cloned_difference( - _cloned_intersection(froms, explicit_correlate_froms or ()), + _cloned_intersection( + froms, explicit_correlate_froms or ()), self._correlate_except ) ] if self._auto_correlate and \ - implicit_correlate_froms and \ - len(froms) > 1: + implicit_correlate_froms and \ + len(froms) > 1: froms = [ f for f in froms if f not in @@ -2294,10 +2566,11 @@ def _get_display_froms(self, explicit_correlate_froms=None, if not len(froms): raise exc.InvalidRequestError("Select statement '%s" - "' returned no FROM clauses due to " - "auto-correlation; specify " - "correlate() to control " - "correlation manually." % self) + "' returned no FROM clauses " + "due to auto-correlation; " + "specify correlate() " + "to control correlation " + "manually." % self) return froms @@ -2312,10 +2585,30 @@ def froms(self): return self._get_display_froms() + def with_statement_hint(self, text, dialect_name='*'): + """add a statement hint to this :class:`.Select`. + + This method is similar to :meth:`.Select.with_hint` except that + it does not require an individual table, and instead applies to the + statement as a whole. + + Hints here are specific to the backend database and may include + directives such as isolation levels, file directives, fetch directives, + etc. + + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.Select.with_hint` + + """ + return self.with_hint(None, text, dialect_name) + @_generative def with_hint(self, selectable, text, dialect_name='*'): - """Add an indexing hint for the given selectable to this - :class:`.Select`. + """Add an indexing or other executional context hint for the given + selectable to this :class:`.Select`. The text of the hint is rendered in the appropriate location for the database backend in use, relative @@ -2327,7 +2620,7 @@ def with_hint(self, selectable, text, dialect_name='*'): following:: select([mytable]).\\ - with_hint(mytable, "+ index(%(name)s ix_mytable)") + with_hint(mytable, "index(%(name)s ix_mytable)") Would render SQL as:: @@ -2338,18 +2631,26 @@ def with_hint(self, selectable, text, dialect_name='*'): and Sybase simultaneously:: select([mytable]).\\ - with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\ + with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\\ with_hint(mytable, "WITH INDEX ix_mytable", 'sybase') + .. 
seealso:: + + :meth:`.Select.with_statement_hint` + """ - self._hints = self._hints.union( - {(selectable, dialect_name): text}) + if selectable is None: + self._statement_hints += ((dialect_name, text), ) + else: + self._hints = self._hints.union( + {(selectable, dialect_name): text}) @property def type(self): raise exc.InvalidRequestError("Select objects don't have a type. " - "Call as_scalar() on this Select object " - "to return a 'scalar' version of this Select.") + "Call as_scalar() on this Select " + "object to return a 'scalar' version " + "of this Select.") @_memoized_property.method def locate_all_froms(self): @@ -2371,6 +2672,20 @@ def inner_columns(self): """ return _select_iterables(self._raw_columns) + @_memoized_property + def _label_resolve_dict(self): + with_cols = dict( + (c._resolve_label or c._label or c.key, c) + for c in _select_iterables(self._raw_columns) + if c._allow_label_resolve) + only_froms = dict( + (c.key, c) for c in + _select_iterables(self.froms) if c._allow_label_resolve) + for key, value in only_froms.items(): + with_cols.setdefault(key, value) + + return with_cols, only_froms + def is_derived_from(self, fromclause): if self in fromclause._cloned_set: return True @@ -2381,6 +2696,7 @@ def is_derived_from(self, fromclause): return False def _copy_internals(self, clone=_clone, **kw): + super(Select, self)._copy_internals(clone, **kw) # Select() object has been cloned and probably adapted by the # given clone function. Apply the cloning function to internal @@ -2393,18 +2709,18 @@ def _copy_internals(self, clone=_clone, **kw): # as of 0.7.4 we also put the current version of _froms, which # gets cleared on each generation. previously we were "baking" # _froms into self._from_obj. - self._from_cloned = from_cloned = dict((f, clone(f, **kw)) - for f in self._from_obj.union(self._froms)) + self._from_cloned = from_cloned = dict( + (f, clone(f, **kw)) for f in self._from_obj.union(self._froms)) # 3. update persistent _from_obj with the cloned versions. self._from_obj = util.OrderedSet(from_cloned[f] for f in - self._from_obj) + self._from_obj) # the _correlate collection is done separately, what can happen # here is the same item is _correlate as in _from_obj but the # _correlate version has an annotation on it - (specifically # RelationshipProperty.Comparator._criterion_exists() does - # this). Also keep _correlate liberally open with it's previous + # this). Also keep _correlate liberally open with its previous # contents, as this set is used for matching, not rendering. self._correlate = set(clone(f) for f in self._correlate).union(self._correlate) @@ -2412,11 +2728,11 @@ def _copy_internals(self, clone=_clone, **kw): # 4. clone other things. The difficulty here is that Column # objects are not actually cloned, and refer to their original # .table, resulting in the wrong "from" parent after a clone - # operation. Hence _from_cloned and _from_obj supercede what is + # operation. Hence _from_cloned and _from_obj supersede what is # present here. 
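For the hinting changes a few hunks up: ``with_hint()`` stays per-selectable, while the new ``with_statement_hint()`` (1.0.0) attaches a hint to the statement as a whole. A sketch with a hypothetical ``mytable`` and an illustrative, backend-specific hint string::

    from sqlalchemy import Table, Column, Integer, String, MetaData, select

    metadata = MetaData()
    mytable = Table('mytable', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('name', String(50)))

    # per-table hints, each limited to a specific dialect
    stmt = select([mytable]).\
        with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\
        with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')

    # statement-wide hint; no table argument required (the hint text here
    # is made up for illustration)
    stmt = stmt.with_statement_hint("MAX_EXECUTION_TIME(1000)", 'mysql')

    print(stmt)
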
self._raw_columns = [clone(c, **kw) for c in self._raw_columns] for attr in '_whereclause', '_having', '_order_by_clause', \ - '_group_by_clause', '_for_update_arg': + '_group_by_clause', '_for_update_arg': if getattr(self, attr) is not None: setattr(self, attr, clone(getattr(self, attr), **kw)) @@ -2432,7 +2748,7 @@ def get_children(self, column_collections=True, **kwargs): [x for x in (self._whereclause, self._having, self._order_by_clause, self._group_by_clause) - if x is not None] + if x is not None] @_generative def column(self, column): @@ -2467,12 +2783,12 @@ def reduce_columns(self, sqlutil, only_synonyms=True): """ return self.with_only_columns( - sqlutil.reduce_columns( - self.inner_columns, - only_synonyms=only_synonyms, - *(self._whereclause, ) + tuple(self._from_obj) - ) + sqlutil.reduce_columns( + self.inner_columns, + only_synonyms=only_synonyms, + *(self._whereclause, ) + tuple(self._from_obj) ) + ) @_generative def with_only_columns(self, columns): @@ -2599,7 +2915,7 @@ def distinct(self, *expr): """ if expr: - expr = [_literal_as_text(e) for e in expr] + expr = [_literal_as_label_reference(e) for e in expr] if isinstance(self._distinct, list): self._distinct = self._distinct + expr else: @@ -2679,16 +2995,16 @@ def correlate(self, *fromclauses): :meth:`.Select.correlate`. .. versionchanged:: 0.8.0 The :meth:`.Select.correlate` method no - longer unconditionally removes entries from the FROM clause; instead, - the candidate FROM entries must also be matched by a FROM entry - located in an enclosing :class:`.Select`, which ultimately encloses - this one as present in the WHERE clause, ORDER BY clause, HAVING - clause, or columns clause of an enclosing :meth:`.Select`. + longer unconditionally removes entries from the FROM clause; + instead, the candidate FROM entries must also be matched by a FROM + entry located in an enclosing :class:`.Select`, which ultimately + encloses this one as present in the WHERE clause, ORDER BY clause, + HAVING clause, or columns clause of an enclosing :meth:`.Select`. .. versionchanged:: 0.8.2 explicit correlation takes place via any level of nesting of :class:`.Select` objects; in previous - 0.8 versions, correlation would only occur relative to the immediate - enclosing :class:`.Select` construct. + 0.8 versions, correlation would only occur relative to the + immediate enclosing :class:`.Select` construct. .. seealso:: @@ -2702,7 +3018,7 @@ def correlate(self, *fromclauses): self._correlate = () else: self._correlate = set(self._correlate).union( - _interpret_as_from(f) for f in fromclauses) + _interpret_as_from(f) for f in fromclauses) @_generative def correlate_except(self, *fromclauses): @@ -2744,21 +3060,21 @@ def correlate_except(self, *fromclauses): self._correlate_except = () else: self._correlate_except = set(self._correlate_except or ()).union( - _interpret_as_from(f) for f in fromclauses) + _interpret_as_from(f) for f in fromclauses) def append_correlation(self, fromclause): """append the given correlation expression to this select() construct. This is an **in-place** mutation method; the - :meth:`~.Select.correlate` method is preferred, as it provides standard - :term:`method chaining`. + :meth:`~.Select.correlate` method is preferred, as it provides + standard :term:`method chaining`. 
""" self._auto_correlate = False self._correlate = set(self._correlate).union( - _interpret_as_from(f) for f in fromclause) + _interpret_as_from(f) for f in fromclause) def append_column(self, column): """append the given column expression to the columns clause of this @@ -2782,8 +3098,8 @@ def append_prefix(self, clause): construct. This is an **in-place** mutation method; the - :meth:`~.Select.prefix_with` method is preferred, as it provides standard - :term:`method chaining`. + :meth:`~.Select.prefix_with` method is preferred, as it provides + standard :term:`method chaining`. """ clause = _literal_as_text(clause) @@ -2802,7 +3118,8 @@ def append_whereclause(self, whereclause): """ self._reset_exported() - self._whereclause = and_(True_._ifnone(self._whereclause), whereclause) + self._whereclause = and_( + True_._ifnone(self._whereclause), whereclause) def append_having(self, having): """append the given expression to this select() construct's HAVING @@ -2823,22 +3140,23 @@ def append_from(self, fromclause): FROM clause. This is an **in-place** mutation method; the - :meth:`~.Select.select_from` method is preferred, as it provides standard - :term:`method chaining`. + :meth:`~.Select.select_from` method is preferred, as it provides + standard :term:`method chaining`. """ self._reset_exported() fromclause = _interpret_as_from(fromclause) self._from_obj = self._from_obj.union([fromclause]) - @_memoized_property def _columns_plus_names(self): if self.use_labels: names = set() + def name_for_col(c): - if c._label is None: + if c._label is None or not c._render_label_in_columns_clause: return (None, c) + name = c._label if name in names: name = c.anon_label @@ -2848,12 +3166,14 @@ def name_for_col(c): return [ name_for_col(c) - for c in util.unique_list(_select_iterables(self._raw_columns)) + for c in util.unique_list( + _select_iterables(self._raw_columns)) ] else: return [ (None, c) - for c in util.unique_list(_select_iterables(self._raw_columns)) + for c in util.unique_list( + _select_iterables(self._raw_columns)) ] def _populate_column_collection(self): @@ -2870,8 +3190,8 @@ def _populate_column_collection(self): key = None c._make_proxy(self, key=key, - name=name, - name_is_truncatable=True) + name=name, + name_is_truncatable=True) def _refresh_for_new_column(self, column): for fromclause in self._froms: @@ -2880,7 +3200,8 @@ def _refresh_for_new_column(self, column): if col in self.inner_columns and self._cols_populated: our_label = col._key_label if self.use_labels else col.key if our_label not in self.c: - return col._make_proxy(self, + return col._make_proxy( + self, name=col._label if self.use_labels else None, key=col._key_label if self.use_labels else None, name_is_truncatable=True) @@ -2965,6 +3286,7 @@ def _set_bind(self, bind): class ScalarSelect(Generative, Grouping): _from_objects = [] + _is_from_container = True def __init__(self, element): self.element = element @@ -2973,8 +3295,8 @@ def __init__(self, element): @property def columns(self): raise exc.InvalidRequestError('Scalar Select expression has no ' - 'columns; use this object directly within a ' - 'column-level expression.') + 'columns; use this object directly ' + 'within a column-level expression.') c = columns @_generative @@ -2996,7 +3318,6 @@ class Exists(UnaryExpression): __visit_name__ = UnaryExpression.__visit_name__ _from_objects = [] - def __init__(self, *args, **kwargs): """Construct a new :class:`.Exists` against an existing :class:`.Select` object. 
@@ -3023,7 +3344,8 @@ def __init__(self, *args, **kwargs): s = Select(*args, **kwargs).as_scalar().self_group() UnaryExpression.__init__(self, s, operator=operators.exists, - type_=type_api.BOOLEANTYPE) + type_=type_api.BOOLEANTYPE, + wraps_column_expression=True) def select(self, whereclause=None, **params): return Select([self], whereclause, **params) @@ -3062,8 +3384,8 @@ class TextAsFrom(SelectBase): """Wrap a :class:`.TextClause` construct within a :class:`.SelectBase` interface. - This allows the :class:`.TextClause` object to gain a ``.c`` collection and - other FROM-like capabilities such as :meth:`.FromClause.alias`, + This allows the :class:`.TextClause` object to gain a ``.c`` collection + and other FROM-like capabilities such as :meth:`.FromClause.alias`, :meth:`.SelectBase.cte`, etc. The :class:`.TextAsFrom` construct is produced via the @@ -3105,11 +3427,10 @@ def _copy_internals(self, clone=_clone, **kw): def _scalar_type(self): return self.column_args[0].type + class AnnotatedFromClause(Annotated): def __init__(self, element, values): # force FromClause to generate their internal # collections into __dict__ element.c Annotated.__init__(self, element, values) - - diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py index f3468ebc24..20a9b21e35 100644 --- a/lib/sqlalchemy/sql/sqltypes.py +++ b/lib/sqlalchemy/sql/sqltypes.py @@ -1,5 +1,6 @@ # sql/sqltypes.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,8 +13,7 @@ import codecs from .type_api import TypeEngine, TypeDecorator, to_instance -from .elements import quoted_name, type_coerce -from .default_comparator import _DefaultColumnComparator +from .elements import quoted_name, type_coerce, _defer_name from .. import exc, util, processors from .base import _bind_or_error, SchemaEventTarget from . import operators @@ -24,7 +24,9 @@ if util.jython: import array + class _DateAffinity(object): + """Mixin date/time specific expression adaptations. Rules are implemented within Date,Time,Interval,DateTime, Numeric, @@ -42,19 +44,28 @@ class Comparator(TypeEngine.Comparator): def _adapt_expression(self, op, other_comparator): othertype = other_comparator.type._type_affinity - return op, \ - to_instance(self.type._expression_adaptations.get(op, self._blank_dict).\ + return ( + op, to_instance( + self.type._expression_adaptations. + get(op, self._blank_dict). get(othertype, NULLTYPE)) + ) comparator_factory = Comparator + class Concatenable(object): + """A mixin that marks a type as supporting 'concatenation', typically strings.""" class Comparator(TypeEngine.Comparator): + def _adapt_expression(self, op, other_comparator): - if op is operators.add and isinstance(other_comparator, - (Concatenable.Comparator, NullType.Comparator)): + if (op is operators.add and + isinstance( + other_comparator, + (Concatenable.Comparator, NullType.Comparator) + )): return operators.concat_op, self.expr.type else: return op, self.expr.type @@ -63,6 +74,7 @@ def _adapt_expression(self, op, other_comparator): class String(Concatenable, TypeEngine): + """The base for all string and character types. In SQL, corresponds to VARCHAR. 
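The ``Exists`` construct adjusted above is most often built empty and given criteria generatively; the resulting EXISTS correlates to the enclosing SELECT. A minimal sketch with hypothetical ``users`` / ``addresses`` tables::

    from sqlalchemy import (Table, Column, Integer, String, MetaData,
                            select, exists)

    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(50)))
    addresses = Table('addresses', metadata,
                      Column('id', Integer, primary_key=True),
                      Column('user_id', Integer))

    # EXISTS (SELECT * FROM addresses WHERE addresses.user_id = users.id)
    stmt = select([users.c.name]).where(
        exists().where(addresses.c.user_id == users.c.id)
    )

    print(stmt)
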
Can also take Python unicode objects @@ -78,10 +90,10 @@ class String(Concatenable, TypeEngine): __visit_name__ = 'string' def __init__(self, length=None, collation=None, - convert_unicode=False, - unicode_error=None, - _warn_on_bytestring=False - ): + convert_unicode=False, + unicode_error=None, + _warn_on_bytestring=False + ): """ Create a string-holding type. @@ -146,7 +158,7 @@ def __init__(self, length=None, collation=None, """ if unicode_error is not None and convert_unicode != 'force': raise exc.ArgumentError("convert_unicode must be 'force' " - "when unicode_error is set.") + "when unicode_error is set.") self.length = length self.collation = collation @@ -163,12 +175,14 @@ def process(value): def bind_processor(self, dialect): if self.convert_unicode or dialect.convert_unicode: if dialect.supports_unicode_binds and \ - self.convert_unicode != 'force': + self.convert_unicode != 'force': if self._warn_on_bytestring: def process(value): if isinstance(value, util.binary_type): - util.warn("Unicode type received non-unicode bind " - "param value.") + util.warn_limited( + "Unicode type received non-unicode " + "bind param value %r.", + (util.ellipses_string(value),)) return value return process else: @@ -181,8 +195,10 @@ def process(value): if isinstance(value, util.text_type): return encoder(value, self.unicode_error)[0] elif warn_on_bytestring and value is not None: - util.warn("Unicode type received non-unicode bind " - "param value") + util.warn_limited( + "Unicode type received non-unicode bind " + "param value %r.", + (util.ellipses_string(value),)) return value return process else: @@ -191,23 +207,20 @@ def process(value): def result_processor(self, dialect, coltype): wants_unicode = self.convert_unicode or dialect.convert_unicode needs_convert = wants_unicode and \ - (dialect.returns_unicode_strings is not True or - self.convert_unicode in ('force', 'force_nocheck')) + (dialect.returns_unicode_strings is not True or + self.convert_unicode in ('force', 'force_nocheck')) needs_isinstance = ( - needs_convert and - dialect.returns_unicode_strings and - self.convert_unicode != 'force_nocheck' - ) + needs_convert and + dialect.returns_unicode_strings and + self.convert_unicode != 'force_nocheck' + ) if needs_convert: - to_unicode = processors.to_unicode_processor_factory( - dialect.encoding, self.unicode_error) - if needs_isinstance: return processors.to_conditional_unicode_processor_factory( - dialect.encoding, self.unicode_error) + dialect.encoding, self.unicode_error) else: return processors.to_unicode_processor_factory( - dialect.encoding, self.unicode_error) + dialect.encoding, self.unicode_error) else: return None @@ -223,6 +236,7 @@ def get_dbapi_type(self, dbapi): class Text(String): + """A variably sized string type. In SQL, usually corresponds to CLOB or TEXT. Can also take Python @@ -236,6 +250,7 @@ class Text(String): class Unicode(String): + """A variable length Unicode string type. The :class:`.Unicode` type is a :class:`.String` subclass @@ -307,6 +322,7 @@ def __init__(self, length=None, **kwargs): class UnicodeText(Text): + """An unbounded-length Unicode string type. See :class:`.Unicode` for details on the unicode @@ -335,6 +351,7 @@ def __init__(self, length=None, **kwargs): class Integer(_DateAffinity, TypeEngine): + """A type for ``int`` integers.""" __visit_name__ = 'integer' @@ -381,8 +398,8 @@ def _expression_adaptations(self): } - class SmallInteger(Integer): + """A type for smaller ``int`` integers. 
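The ``String`` family documented above takes its length, collation and unicode options at construction time; the non-unicode bind warning is now rate-limited via ``util.warn_limited``. A short declaration sketch with a hypothetical ``articles`` table (collation string is illustrative)::

    from sqlalchemy import (Table, Column, Integer, String, Unicode, Text,
                            MetaData)

    metadata = MetaData()
    articles = Table('articles', metadata,
                     Column('id', Integer, primary_key=True),
                     # VARCHAR(80) with an explicit COLLATE clause in DDL
                     Column('slug', String(80, collation='utf8_general_ci')),
                     # Unicode implies convert_unicode and a "national"
                     # character type where the backend distinguishes one
                     Column('title', Unicode(200)),
                     # unbounded text, CLOB/TEXT on the backend
                     Column('body', Text))
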
Typically generates a ``SMALLINT`` in DDL, and otherwise acts like @@ -394,6 +411,7 @@ class SmallInteger(Integer): class BigInteger(Integer): + """A type for bigger ``int`` integers. Typically generates a ``BIGINT`` in DDL, and otherwise acts like @@ -404,13 +422,13 @@ class BigInteger(Integer): __visit_name__ = 'big_integer' - class Numeric(_DateAffinity, TypeEngine): + """A type for fixed precision numbers, such as ``NUMERIC`` or ``DECIMAL``. - This type returns Python ``decimal.Decimal`` objects by default, unless the - :paramref:`.Numeric.asdecimal` flag is set to False, in which case they - are coerced to Python ``float`` objects. + This type returns Python ``decimal.Decimal`` objects by default, unless + the :paramref:`.Numeric.asdecimal` flag is set to False, in which case + they are coerced to Python ``float`` objects. .. note:: @@ -420,8 +438,8 @@ class Numeric(_DateAffinity, TypeEngine): type (e.g. ``FLOAT``, ``REAL``, others). If the database column on the server is in fact a floating-point type type, such as ``FLOAT`` or ``REAL``, use the :class:`.Float` - type or a subclass, otherwise numeric coercion between ``float``/``Decimal`` - may or may not function as expected. + type or a subclass, otherwise numeric coercion between + ``float``/``Decimal`` may or may not function as expected. .. note:: @@ -449,7 +467,7 @@ class Numeric(_DateAffinity, TypeEngine): _default_decimal_return_scale = 10 def __init__(self, precision=None, scale=None, - decimal_return_scale=None, asdecimal=True): + decimal_return_scale=None, asdecimal=True): """ Construct a Numeric. @@ -470,9 +488,10 @@ def __init__(self, precision=None, scale=None, database types don't have a notion of "scale", so by default the float type looks for the first ten decimal places when converting. Specfiying this value will override that length. Types which - do include an explicit ".scale" value, such as the base :class:`.Numeric` - as well as the MySQL float types, will use the value of ".scale" - as the default for decimal_return_scale, if not otherwise specified. + do include an explicit ".scale" value, such as the base + :class:`.Numeric` as well as the MySQL float types, will use the + value of ".scale" as the default for decimal_return_scale, if not + otherwise specified. .. versionadded:: 0.9.0 @@ -544,9 +563,9 @@ def result_processor(self, dialect, coltype): # we're a "numeric", DBAPI returns floats, convert. return processors.to_decimal_processor_factory( - decimal.Decimal, - self.scale if self.scale is not None - else self._default_decimal_return_scale) + decimal.Decimal, + self.scale if self.scale is not None + else self._default_decimal_return_scale) else: if dialect.supports_native_decimal: return processors.to_float @@ -581,6 +600,7 @@ def _expression_adaptations(self): class Float(Numeric): + """Type representing floating point types, such as ``FLOAT`` or ``REAL``. This type returns Python ``float`` objects by default, unless the @@ -595,8 +615,8 @@ class Float(Numeric): and not a decimal type (e.g. ``DECIMAL``, ``NUMERIC``, others). If the database column on the server is in fact a Numeric type, such as ``DECIMAL`` or ``NUMERIC``, use the :class:`.Numeric` - type or a subclass, otherwise numeric coercion between ``float``/``Decimal`` - may or may not function as expected. + type or a subclass, otherwise numeric coercion between + ``float``/``Decimal`` may or may not function as expected. 
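To make the ``Numeric`` / ``Float`` distinction above concrete: ``Numeric`` returns ``decimal.Decimal`` unless ``asdecimal=False``, while ``Float`` returns ``float`` unless ``asdecimal=True``, with ``decimal_return_scale`` capping the digits kept in that conversion. A sketch with a hypothetical ``invoices`` table::

    from sqlalchemy import Table, Column, Integer, Numeric, Float, MetaData

    metadata = MetaData()
    invoices = Table('invoices', metadata,
                     Column('id', Integer, primary_key=True),
                     # exact fixed-precision value -> decimal.Decimal
                     Column('total', Numeric(10, 2)),
                     # server-side floating point -> plain Python float
                     Column('exchange_rate', Float(precision=10)),
                     # floating point coerced back to Decimal, keeping
                     # four decimal places on the way out
                     Column('weight', Float(asdecimal=True,
                                            decimal_return_scale=4)))
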
""" @@ -605,7 +625,7 @@ class Float(Numeric): scale = None def __init__(self, precision=None, asdecimal=False, - decimal_return_scale=None, **kwargs): + decimal_return_scale=None, **kwargs): """ Construct a Float. @@ -639,13 +659,13 @@ def __init__(self, precision=None, asdecimal=False, self.decimal_return_scale = decimal_return_scale if kwargs: util.warn_deprecated("Additional keyword arguments " - "passed to Float ignored.") + "passed to Float ignored.") def result_processor(self, dialect, coltype): if self.asdecimal: return processors.to_decimal_processor_factory( - decimal.Decimal, - self._effective_decimal_return_scale) + decimal.Decimal, + self._effective_decimal_return_scale) else: return None @@ -672,6 +692,7 @@ def _expression_adaptations(self): class DateTime(_DateAffinity, TypeEngine): + """A type for ``datetime.datetime()`` objects. Date and time types return objects from the Python ``datetime`` @@ -716,6 +737,7 @@ def _expression_adaptations(self): class Date(_DateAffinity, TypeEngine): + """A type for ``datetime.date()`` objects.""" __visit_name__ = 'date' @@ -753,6 +775,7 @@ def _expression_adaptations(self): class Time(_DateAffinity, TypeEngine): + """A type for ``datetime.time()`` objects.""" __visit_name__ = 'time' @@ -782,6 +805,7 @@ def _expression_adaptations(self): class _Binary(TypeEngine): + """Define base behavior for binary types.""" def __init__(self, length=None): @@ -849,11 +873,12 @@ def get_dbapi_type(self, dbapi): class LargeBinary(_Binary): + """A type for large binary byte data. - The Binary type generates BLOB or BYTEA when tables are created, - and also converts incoming values using the ``Binary`` callable - provided by each DB-API. + The :class:`.LargeBinary` type corresponds to a large and/or unlengthed + binary type for the target platform, such as BLOB on MySQL and BYTEA for + Postgresql. It also handles the necessary conversions for the DBAPI. """ @@ -864,19 +889,15 @@ def __init__(self, length=None): Construct a LargeBinary type. :param length: optional, a length for the column for use in - DDL statements, for those BLOB types that accept a length - (i.e. MySQL). It does *not* produce a small BINARY/VARBINARY - type - use the BINARY/VARBINARY types specifically for those. - May be safely omitted if no ``CREATE - TABLE`` will be issued. Certain databases may require a - *length* for use in DDL, and will raise an exception when - the ``CREATE TABLE`` DDL is issued. + DDL statements, for those binary types that accept a length, + such as the MySQL BLOB type. """ _Binary.__init__(self, length=length) class Binary(LargeBinary): + """Deprecated. Renamed to LargeBinary.""" def __init__(self, *arg, **kw): @@ -885,8 +906,8 @@ def __init__(self, *arg, **kw): LargeBinary.__init__(self, *arg, **kw) - class SchemaType(SchemaEventTarget): + """Mark a type as possibly requiring schema-level DDL for usage. Supports types that must be explicitly created/dropped (i.e. 
PG ENUM type) @@ -909,7 +930,7 @@ class SchemaType(SchemaEventTarget): """ def __init__(self, name=None, schema=None, metadata=None, - inherit_schema=False, quote=None): + inherit_schema=False, quote=None, _create_events=True): if name is not None: self.name = quoted_name(name, quote) else: @@ -917,7 +938,9 @@ def __init__(self, name=None, schema=None, metadata=None, self.schema = schema self.metadata = metadata self.inherit_schema = inherit_schema - if self.metadata: + self._create_events = _create_events + + if _create_events and self.metadata: event.listen( self.metadata, "before_create", @@ -936,11 +959,14 @@ def _set_table(self, column, table): if self.inherit_schema: self.schema = table.schema + if not self._create_events: + return + event.listen( table, "before_create", - util.portable_instancemethod( - self._on_table_create) + util.portable_instancemethod( + self._on_table_create) ) event.listen( table, @@ -962,17 +988,19 @@ def _set_table(self, column, table): ) def copy(self, **kw): - return self.adapt(self.__class__) + return self.adapt(self.__class__, _create_events=True) def adapt(self, impltype, **kw): schema = kw.pop('schema', self.schema) metadata = kw.pop('metadata', self.metadata) + _create_events = kw.pop('_create_events', False) + return impltype(name=self.name, - schema=schema, - metadata=metadata, - inherit_schema=self.inherit_schema, - **kw - ) + schema=schema, + inherit_schema=self.inherit_schema, + metadata=metadata, + _create_events=_create_events, + **kw) @property def bind(self): @@ -1016,7 +1044,9 @@ def _on_metadata_drop(self, target, bind, **kw): if t.__class__ is not self.__class__ and isinstance(t, SchemaType): t._on_metadata_drop(target, bind, **kw) + class Enum(String, SchemaType): + """Generic Enum Type. The Enum type provides a set of possible string values which the @@ -1054,7 +1084,7 @@ def __init__(self, *enums, **kw): operations. If the type is not associated with any ``MetaData`` object, it will associate itself with each ``Table`` in which it is used, and will be created when any of those individual tables are - created, after a check is performed for it's existence. The type is + created, after a check is performed for its existence. The type is only dropped when ``drop_all()`` is called for that ``Table`` object's metadata, however. 
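Tying the ``SchemaType`` event wiring above to its most common consumer: an ``Enum`` associated with a ``MetaData`` is created and dropped along with that metadata, and with ``native_enum=False`` it falls back to a VARCHAR plus the CHECK constraint built in ``_set_table``. A sketch with hypothetical names::

    from sqlalchemy import Table, Column, Integer, Enum, MetaData

    metadata = MetaData()

    # emitted via CREATE TYPE on Postgresql, created/dropped together
    # with the metadata it is associated with here
    status = Enum('draft', 'published', 'archived',
                  name='entry_status', metadata=metadata)

    entries = Table('entries', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('status', status),
                    # plain VARCHAR + CHECK constraint on every backend
                    Column('visibility', Enum('public', 'private',
                                              name='visibility',
                                              native_enum=False)))
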
@@ -1113,12 +1143,13 @@ def __init__(self, *enums, **kw): def __repr__(self): return util.generic_repr(self, - to_inspect=[Enum, SchemaType], - ) + additional_kw=[('native_enum', True)], + to_inspect=[Enum, SchemaType], + ) def _should_create_constraint(self, compiler): return not self.native_enum or \ - not compiler.dialect.supports_native_enum + not compiler.dialect.supports_native_enum @util.dependencies("sqlalchemy.sql.schema") def _set_table(self, schema, column, table): @@ -1126,31 +1157,35 @@ def _set_table(self, schema, column, table): SchemaType._set_table(self, column, table) e = schema.CheckConstraint( - type_coerce(column, self).in_(self.enums), - name=self.name, - _create_rule=util.portable_instancemethod( - self._should_create_constraint) - ) + type_coerce(column, self).in_(self.enums), + name=_defer_name(self.name), + _create_rule=util.portable_instancemethod( + self._should_create_constraint), + _type_bound=True + ) assert e.table is table def adapt(self, impltype, **kw): schema = kw.pop('schema', self.schema) metadata = kw.pop('metadata', self.metadata) + _create_events = kw.pop('_create_events', False) if issubclass(impltype, Enum): return impltype(name=self.name, - schema=schema, - metadata=metadata, - convert_unicode=self.convert_unicode, - native_enum=self.native_enum, - inherit_schema=self.inherit_schema, - *self.enums, - **kw - ) + schema=schema, + metadata=metadata, + convert_unicode=self.convert_unicode, + native_enum=self.native_enum, + inherit_schema=self.inherit_schema, + _create_events=_create_events, + *self.enums, + **kw) else: + # TODO: why would we be here? return super(Enum, self).adapt(impltype, **kw) class PickleType(TypeDecorator): + """Holds Python objects, which are serialized using pickle. PickleType builds upon the Binary type to apply Python's @@ -1166,7 +1201,7 @@ class PickleType(TypeDecorator): impl = LargeBinary def __init__(self, protocol=pickle.HIGHEST_PROTOCOL, - pickler=None, comparator=None): + pickler=None, comparator=None): """ Construct a PickleType. @@ -1231,6 +1266,7 @@ def compare_values(self, x, y): class Boolean(TypeEngine, SchemaType): + """A bool datatype. Boolean typically uses BOOLEAN or SMALLINT on the DDL side, and on @@ -1240,7 +1276,8 @@ class Boolean(TypeEngine, SchemaType): __visit_name__ = 'boolean' - def __init__(self, create_constraint=True, name=None): + def __init__( + self, create_constraint=True, name=None, _create_events=True): """Construct a Boolean. :param create_constraint: defaults to True. If the boolean @@ -1253,6 +1290,7 @@ def __init__(self, create_constraint=True, name=None): """ self.create_constraint = create_constraint self.name = name + self._create_events = _create_events def _should_create_constraint(self, compiler): return not compiler.dialect.supports_native_boolean @@ -1263,11 +1301,12 @@ def _set_table(self, schema, column, table): return e = schema.CheckConstraint( - type_coerce(column, self).in_([0, 1]), - name=self.name, - _create_rule=util.portable_instancemethod( - self._should_create_constraint) - ) + type_coerce(column, self).in_([0, 1]), + name=_defer_name(self.name), + _create_rule=util.portable_instancemethod( + self._should_create_constraint), + _type_bound=True + ) assert e.table is table @property @@ -1297,6 +1336,7 @@ def result_processor(self, dialect, coltype): class Interval(_DateAffinity, TypeDecorator): + """A type for ``datetime.timedelta()`` objects. The Interval type deals with ``datetime.timedelta`` objects. 
In @@ -1317,8 +1357,8 @@ class Interval(_DateAffinity, TypeDecorator): epoch = dt.datetime.utcfromtimestamp(0) def __init__(self, native=True, - second_precision=None, - day_precision=None): + second_precision=None, + day_precision=None): """Construct an Interval object. :param native: when True, use the actual @@ -1345,10 +1385,10 @@ def adapt(self, cls, **kw): return cls._adapt_from_generic_interval(self, **kw) else: return self.__class__( - native=self.native, - second_precision=self.second_precision, - day_precision=self.day_precision, - **kw) + native=self.native, + second_precision=self.second_precision, + day_precision=self.day_precision, + **kw) @property def python_type(self): @@ -1419,30 +1459,35 @@ def coerce_compared_value(self, op, value): class REAL(Float): + """The SQL REAL type.""" __visit_name__ = 'REAL' class FLOAT(Float): + """The SQL FLOAT type.""" __visit_name__ = 'FLOAT' class NUMERIC(Numeric): + """The SQL NUMERIC type.""" __visit_name__ = 'NUMERIC' class DECIMAL(Numeric): + """The SQL DECIMAL type.""" __visit_name__ = 'DECIMAL' class INTEGER(Integer): + """The SQL INT or INTEGER type.""" __visit_name__ = 'INTEGER' @@ -1450,18 +1495,21 @@ class INTEGER(Integer): class SMALLINT(SmallInteger): + """The SQL SMALLINT type.""" __visit_name__ = 'SMALLINT' class BIGINT(BigInteger): + """The SQL BIGINT type.""" __visit_name__ = 'BIGINT' class TIMESTAMP(DateTime): + """The SQL TIMESTAMP type.""" __visit_name__ = 'TIMESTAMP' @@ -1471,30 +1519,35 @@ def get_dbapi_type(self, dbapi): class DATETIME(DateTime): + """The SQL DATETIME type.""" __visit_name__ = 'DATETIME' class DATE(Date): + """The SQL DATE type.""" __visit_name__ = 'DATE' class TIME(Time): + """The SQL TIME type.""" __visit_name__ = 'TIME' class TEXT(Text): + """The SQL TEXT type.""" __visit_name__ = 'TEXT' class CLOB(Text): + """The CLOB type. This type is found in Oracle and Informix. @@ -1504,53 +1557,63 @@ class CLOB(Text): class VARCHAR(String): + """The SQL VARCHAR type.""" __visit_name__ = 'VARCHAR' class NVARCHAR(Unicode): + """The SQL NVARCHAR type.""" __visit_name__ = 'NVARCHAR' class CHAR(String): + """The SQL CHAR type.""" __visit_name__ = 'CHAR' class NCHAR(Unicode): + """The SQL NCHAR type.""" __visit_name__ = 'NCHAR' class BLOB(LargeBinary): + """The SQL BLOB type.""" __visit_name__ = 'BLOB' class BINARY(_Binary): + """The SQL BINARY type.""" __visit_name__ = 'BINARY' class VARBINARY(_Binary): + """The SQL VARBINARY type.""" __visit_name__ = 'VARBINARY' class BOOLEAN(Boolean): + """The SQL BOOLEAN type.""" __visit_name__ = 'BOOLEAN' + class NullType(TypeEngine): + """An unknown type. :class:`.NullType` is used as a default type for those cases where @@ -1564,12 +1627,13 @@ class NullType(TypeEngine): as ``None`` or is not passed at all. The :class:`.NullType` can be used within SQL expression invocation - without issue, it just has no behavior either at the expression construction - level or at the bind-parameter/result processing level. :class:`.NullType` - will result in a :exc:`.CompileError` if the compiler is asked to render - the type itself, such as if it is used in a :func:`.cast` operation - or within a schema creation operation such as that invoked by - :meth:`.MetaData.create_all` or the :class:`.CreateTable` construct. + without issue, it just has no behavior either at the expression + construction level or at the bind-parameter/result processing level. 
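``Interval``, whose constructor is reindented above, maps ``datetime.timedelta`` values either onto a native INTERVAL type or onto an epoch-relative DATETIME fallback. A sketch with a hypothetical ``tasks`` table::

    import datetime

    from sqlalchemy import Table, Column, Integer, Interval, MetaData

    metadata = MetaData()
    tasks = Table('tasks', metadata,
                  Column('id', Integer, primary_key=True),
                  # native INTERVAL where the backend has one, otherwise
                  # stored relative to the 1970 epoch as a DATETIME
                  Column('duration', Interval(native=True,
                                              second_precision=6)))

    # bound values are plain timedeltas, e.g.
    # conn.execute(tasks.insert(), id=1,
    #              duration=datetime.timedelta(hours=2, minutes=30))
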
+ :class:`.NullType` will result in a :exc:`.CompileError` if the compiler + is asked to render the type itself, such as if it is used in a + :func:`.cast` operation or within a schema creation operation such as that + invoked by :meth:`.MetaData.create_all` or the :class:`.CreateTable` + construct. """ __visit_name__ = 'null' @@ -1582,19 +1646,36 @@ def process(value): return process class Comparator(TypeEngine.Comparator): + def _adapt_expression(self, op, other_comparator): if isinstance(other_comparator, NullType.Comparator) or \ - not operators.is_commutative(op): + not operators.is_commutative(op): return op, self.expr.type else: return other_comparator._adapt_expression(op, self) comparator_factory = Comparator +class MatchType(Boolean): + """Refers to the return type of the MATCH operator. + + As the :meth:`.ColumnOperators.match` is probably the most open-ended + operator in generic SQLAlchemy Core, we can't assume the return type + at SQL evaluation time, as MySQL returns a floating point, not a boolean, + and other backends might do something different. So this type + acts as a placeholder, currently subclassing :class:`.Boolean`. + The type allows dialects to inject result-processing functionality + if needed, and on MySQL will return floating-point values. + + .. versionadded:: 1.0.0 + + """ + NULLTYPE = NullType() BOOLEANTYPE = Boolean() STRINGTYPE = String() INTEGERTYPE = Integer() +MATCHTYPE = MatchType() _type_map = { int: Integer(), @@ -1622,18 +1703,7 @@ def _adapt_expression(self, op, other_comparator): type_api.STRINGTYPE = STRINGTYPE type_api.INTEGERTYPE = INTEGERTYPE type_api.NULLTYPE = NULLTYPE +type_api.MATCHTYPE = MATCHTYPE type_api._type_map = _type_map -# this one, there's all kinds of ways to play it, but at the EOD -# there's just a giant dependency cycle between the typing system and -# the expression element system, as you might expect. We can use -# importlaters or whatnot, but the typing system just necessarily has -# to have some kind of connection like this. right now we're injecting the -# _DefaultColumnComparator implementation into the TypeEngine.Comparator interface. -# Alternatively TypeEngine.Comparator could have an "impl" injected, though -# just injecting the base is simpler, error free, and more performant. -class Comparator(_DefaultColumnComparator): - BOOLEANTYPE = BOOLEANTYPE - -TypeEngine.Comparator.__bases__ = (Comparator, ) + TypeEngine.Comparator.__bases__ - +TypeEngine.Comparator.BOOLEANTYPE = BOOLEANTYPE diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 1f534c55ed..2997a8dbfb 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -1,5 +1,6 @@ # sql/types_api.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -11,13 +12,15 @@ from .. import exc, util from . import operators -from .visitors import Visitable +from .visitors import Visitable, VisitableType # these are back-assigned by sqltypes. BOOLEANTYPE = None INTEGERTYPE = None NULLTYPE = None STRINGTYPE = None +MATCHTYPE = None + class TypeEngine(Visitable): """The ultimate base class for all SQL datatypes. @@ -25,7 +28,8 @@ class TypeEngine(Visitable): Common subclasses of :class:`.TypeEngine` include :class:`.String`, :class:`.Integer`, and :class:`.Boolean`. 
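The new ``MatchType`` above is what ``ColumnOperators.match()`` reports as its result type once this change is applied, since backends disagree on whether MATCH yields a boolean or a float. A small sketch with a hypothetical ``documents`` table::

    from sqlalchemy import Table, Column, Integer, String, MetaData, select

    metadata = MetaData()
    documents = Table('documents', metadata,
                      Column('id', Integer, primary_key=True),
                      Column('body', String))

    # renders the backend's full-text match syntax, e.g.
    # MATCH ... AGAINST on MySQL, @@ to_tsquery() on Postgresql
    expr = documents.c.body.match('sqlalchemy')
    stmt = select([documents.c.id]).where(expr)

    print(type(expr.type).__name__)   # MatchType with this change applied
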
- For an overview of the SQLAlchemy typing system, see :ref:`types_toplevel`. + For an overview of the SQLAlchemy typing system, see + :ref:`types_toplevel`. .. seealso:: @@ -42,14 +46,55 @@ class Comparator(operators.ColumnOperators): """ + __slots__ = 'expr', 'type' + + default_comparator = None def __init__(self, expr): self.expr = expr + self.type = expr.type + + @util.dependencies('sqlalchemy.sql.default_comparator') + def operate(self, default_comparator, op, *other, **kwargs): + o = default_comparator.operator_lookup[op.__name__] + return o[0](self.expr, op, *(other + o[1:]), **kwargs) + + @util.dependencies('sqlalchemy.sql.default_comparator') + def reverse_operate(self, default_comparator, op, other, **kwargs): + o = default_comparator.operator_lookup[op.__name__] + return o[0](self.expr, op, other, + reverse=True, *o[1:], **kwargs) + + def _adapt_expression(self, op, other_comparator): + """evaluate the return type of , + and apply any adaptations to the given operator. + + This method determines the type of a resulting binary expression + given two source types and an operator. For example, two + :class:`.Column` objects, both of the type :class:`.Integer`, will + produce a :class:`.BinaryExpression` that also has the type + :class:`.Integer` when compared via the addition (``+``) operator. + However, using the addition operator with an :class:`.Integer` + and a :class:`.Date` object will produce a :class:`.Date`, assuming + "days delta" behavior by the database (in reality, most databases + other than Postgresql don't accept this particular operation). + + The method returns a tuple of the form , . + The resulting operator and type will be those applied to the + resulting :class:`.BinaryExpression` as the final operator and the + right-hand side of the expression. + + Note that only a subset of operators make usage of + :meth:`._adapt_expression`, + including math operators and user-defined operators, but not + boolean comparison or special SQL keywords like MATCH or BETWEEN. + + """ + return op, other_comparator.type def __reduce__(self): return _reconstitute_comparator, (self.expr, ) - hashable = True """Flag, if False, means values from this type aren't hashable. @@ -83,6 +128,33 @@ def __reduce__(self): """ + def compare_against_backend(self, dialect, conn_type): + """Compare this type against the given backend type. + + This function is currently not implemented for SQLAlchemy + types, and for all built in types will return ``None``. However, + it can be implemented by a user-defined type + where it can be consumed by schema comparison tools such as + Alembic autogenerate. + + A future release of SQLAlchemy will potentially impement this method + for builtin types as well. + + The function should return True if this type is equivalent to the + given type; the type is typically reflected from the database + so should be database specific. The dialect in use is also + passed. It can also return False to assert that the type is + not equivalent. + + :param dialect: a :class:`.Dialect` that is involved in the comparison. + + :param conn_type: the type object reflected from the backend. + + .. versionadded:: 1.0.3 + + """ + return None + def copy_value(self, value): return value @@ -250,7 +322,7 @@ def with_variant(self, type_, dialect_name): The construction of :meth:`.TypeEngine.with_variant` is always from the "fallback" type to that which is dialect specific. 
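# --- editor's note: illustrative sketch only, not part of the patch above ---
# TypeEngine.with_variant() always starts from the generic "fallback" type and
# layers a dialect-specific type on top of it; because the returned Variant
# object exposes with_variant() as well, the calls can be chained.  The type
# and precision values below are hypothetical, chosen only for demonstration.
from sqlalchemy import Numeric
from sqlalchemy.dialects import mysql, oracle

amount_type = Numeric(12, 2).with_variant(
    mysql.DECIMAL(12, 2), "mysql"        # used only when compiling for MySQL
).with_variant(
    oracle.NUMBER(12, 2), "oracle"       # used only when compiling for Oracle
)
# Any other dialect falls back to the plain Numeric(12, 2) definition.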
The returned type is an instance of :class:`.Variant`, which - itself provides a :meth:`~sqlalchemy.types.Variant.with_variant` + itself provides a :meth:`.Variant.with_variant` that can be called repeatedly. :param type_: a :class:`.TypeEngine` that will be selected @@ -262,8 +334,7 @@ def with_variant(self, type_, dialect_name): .. versionadded:: 0.7.2 """ - return Variant(self, {dialect_name: type_}) - + return Variant(self, {dialect_name: to_instance(type_)}) @util.memoized_property def _type_affinity(self): @@ -289,7 +360,6 @@ def dialect_impl(self, dialect): except KeyError: return self._dialect_info(dialect)['impl'] - def _cached_literal_processor(self, dialect): """Return a dialect-specific literal processor for this type.""" try: @@ -351,7 +421,6 @@ def adapt(self, cls, **kw): """ return util.constructor_copy(self, cls, **kw) - def coerce_compared_value(self, op, value): """Suggest a type for a 'coerced' Python value in an expression. @@ -373,7 +442,7 @@ def coerce_compared_value(self, op, value): """ _coerced_type = _type_map.get(type(value), NULLTYPE) if _coerced_type is NULLTYPE or _coerced_type._type_affinity \ - is self._type_affinity: + is self._type_affinity: return self else: return _coerced_type @@ -410,14 +479,19 @@ def _default_dialect(self, default): def __str__(self): if util.py2k: return unicode(self.compile()).\ - encode('ascii', 'backslashreplace') + encode('ascii', 'backslashreplace') else: return str(self.compile()) def __repr__(self): return util.generic_repr(self) -class UserDefinedType(TypeEngine): + +class VisitableCheckKWArg(util.EnsureKWArgType, VisitableType): + pass + + +class UserDefinedType(util.with_metaclass(VisitableCheckKWArg, TypeEngine)): """Base for user defined types. This should be the base of new types. Note that @@ -430,7 +504,7 @@ class MyType(types.UserDefinedType): def __init__(self, precision = 8): self.precision = precision - def get_col_spec(self): + def get_col_spec(self, **kw): return "MYTYPE(%s)" % self.precision def bind_processor(self, dialect): @@ -450,19 +524,34 @@ def process(value): Column('data', MyType(16)) ) + The ``get_col_spec()`` method will in most cases receive a keyword + argument ``type_expression`` which refers to the owning expression + of the type as being compiled, such as a :class:`.Column` or + :func:`.cast` construct. This keyword is only sent if the method + accepts keyword arguments (e.g. ``**kw``) in its argument signature; + introspection is used to check for this in order to support legacy + forms of this function. + + .. versionadded:: 1.0.0 the owning expression is passed to + the ``get_col_spec()`` method via the keyword argument + ``type_expression``, if it receives ``**kw`` in its signature. + """ __visit_name__ = "user_defined" + ensure_kwarg = 'get_col_spec' class Comparator(TypeEngine.Comparator): + __slots__ = () + def _adapt_expression(self, op, other_comparator): if hasattr(self.type, 'adapt_operator'): util.warn_deprecated( "UserDefinedType.adapt_operator is deprecated. Create " - "a UserDefinedType.Comparator subclass instead which " - "generates the desired expression constructs, given a " - "particular operator." - ) + "a UserDefinedType.Comparator subclass instead which " + "generates the desired expression constructs, given a " + "particular operator." + ) return self.type.adapt_operator(op), self.type else: return op, self.type @@ -567,6 +656,26 @@ def coerce_compared_value(self, op, value): else: return self + .. 
warning:: + + Note that the **behavior of coerce_compared_value is not inherited + by default from that of the base type**. + If the :class:`.TypeDecorator` is augmenting a + type that requires special logic for certain types of operators, + this method **must** be overridden. A key example is when decorating + the :class:`.postgresql.JSON` and :class:`.postgresql.JSONB` types; + the default rules of :meth:`.TypeEngine.coerce_compared_value` should + be used in order to deal with operators like index operations:: + + class MyJsonType(TypeDecorator): + impl = postgresql.JSON + + def coerce_compared_value(self, op, value): + return self.impl.coerce_compared_value(op, value) + + Without the above step, index operations such as ``mycol['foo']`` + will cause the index value ``'foo'`` to be JSON encoded. + """ __visit_name__ = "type_decorator" @@ -601,7 +710,8 @@ def __init__(self, *args, **kwargs): level to "IS " when compared using ``==`` (and same for ``IS NOT`` in conjunction with ``!=``. - For most SQLAlchemy types, this includes ``NoneType``, as well as ``bool``. + For most SQLAlchemy types, this includes ``NoneType``, as well as + ``bool``. :class:`.TypeDecorator` modifies this list to only include ``NoneType``, as typedecorator implementations that deal with boolean types are common. @@ -617,22 +727,27 @@ def __init__(self, *args, **kwargs): """ class Comparator(TypeEngine.Comparator): + __slots__ = () def operate(self, op, *other, **kwargs): kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types return super(TypeDecorator.Comparator, self).operate( - op, *other, **kwargs) + op, *other, **kwargs) def reverse_operate(self, op, other, **kwargs): kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types return super(TypeDecorator.Comparator, self).reverse_operate( - op, other, **kwargs) + op, other, **kwargs) @property def comparator_factory(self): - return type("TDComparator", - (TypeDecorator.Comparator, self.impl.comparator_factory), - {}) + if TypeDecorator.Comparator in self.impl.comparator_factory.__mro__: + return self.impl.comparator_factory + else: + return type("TDComparator", + (TypeDecorator.Comparator, + self.impl.comparator_factory), + {}) def _gen_dialect_impl(self, dialect): """ @@ -650,8 +765,8 @@ def _gen_dialect_impl(self, dialect): if not isinstance(tt, self.__class__): raise AssertionError('Type object %s does not properly ' 'implement the copy() method, it must ' - 'return an object of type %s' % (self, - self.__class__)) + 'return an object of type %s' % + (self, self.__class__)) tt.impl = typedesc return tt @@ -675,7 +790,7 @@ def type_engine(self, dialect): """ adapted = dialect.type_descriptor(self) - if type(adapted) is not type(self): + if not isinstance(adapted, type(self)): return adapted elif isinstance(self.impl, TypeDecorator): return self.impl.type_engine(dialect) @@ -795,16 +910,19 @@ def literal_processor(self, dialect): """Provide a literal processing function for the given :class:`.Dialect`. - Subclasses here will typically override :meth:`.TypeDecorator.process_literal_param` - instead of this method directly. + Subclasses here will typically override + :meth:`.TypeDecorator.process_literal_param` instead of this method + directly. - By default, this method makes use of :meth:`.TypeDecorator.process_bind_param` - if that method is implemented, where :meth:`.TypeDecorator.process_literal_param` - is not. 
The rationale here is that :class:`.TypeDecorator` typically deals - with Python conversions of data that are above the layer of database - presentation. With the value converted by :meth:`.TypeDecorator.process_bind_param`, - the underlying type will then handle whether it needs to be presented to the - DBAPI as a bound parameter or to the database as an inline SQL value. + By default, this method makes use of + :meth:`.TypeDecorator.process_bind_param` if that method is + implemented, where :meth:`.TypeDecorator.process_literal_param` is + not. The rationale here is that :class:`.TypeDecorator` typically + deals with Python conversions of data that are above the layer of + database presentation. With the value converted by + :meth:`.TypeDecorator.process_bind_param`, the underlying type will + then handle whether it needs to be presented to the DBAPI as a bound + parameter or to the database as an inline SQL value. .. versionadded:: 0.9.0 @@ -902,7 +1020,7 @@ def result_processor(self, dialect, coltype): if self._has_result_processor: process_value = self.process_result_value impl_processor = self.impl.result_processor(dialect, - coltype) + coltype) if impl_processor: def process(value): return process_value(impl_processor(value), dialect) @@ -1026,6 +1144,12 @@ def with_variant(self, type_, dialect_name): mapping[dialect_name] = type_ return Variant(self.impl, mapping) + @property + def comparator_factory(self): + """express comparison behavior in terms of the base type""" + return self.impl.comparator_factory + + def _reconstitute_comparator(expression): return expression.comparator @@ -1050,7 +1174,7 @@ def adapt_type(typeobj, colspecs): except KeyError: pass else: - # couldnt adapt - so just return the type itself + # couldn't adapt - so just return the type itself # (it may be a user-defined type) return typeobj # if we adapted the given generic type to a database-specific type, @@ -1060,5 +1184,3 @@ def adapt_type(typeobj, colspecs): if (issubclass(typeobj.__class__, impltype)): return typeobj return typeobj.adapt(impltype) - - diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index 50ce30aafd..665814f845 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -1,5 +1,6 @@ # sql/util.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -15,13 +16,14 @@ from collections import deque from .elements import BindParameter, ColumnClause, ColumnElement, \ - Null, UnaryExpression, literal_column, Label + Null, UnaryExpression, literal_column, Label, _label_reference, \ + _textual_label_reference from .selectable import ScalarSelect, Join, FromClause, FromGrouping from .schema import Column join_condition = util.langhelpers.public_factory( - Join._join_condition, - ".sql.util.join_condition") + Join._join_condition, + ".sql.util.join_condition") # names that are still being imported from the outside from .annotation import _shallow_annotate, _deep_annotate, _deep_deannotate @@ -94,11 +96,11 @@ def my_fn(binary, left, right) def visit(element): if isinstance(element, ScalarSelect): - # we dont want to dig into correlated subqueries, + # we don't want to dig into correlated subqueries, # those are just column elements by themselves yield element elif element.__visit_name__ == 'binary' and \ - operators.is_comparison(element.operator): + 
operators.is_comparison(element.operator): stack.insert(0, element) for l in visit(element.left): for r in visit(element.right): @@ -134,7 +136,7 @@ def find_tables(clause, check_columns=False, if include_crud: _visitors['insert'] = _visitors['update'] = \ - _visitors['delete'] = lambda ent: tables.append(ent.table) + _visitors['delete'] = lambda ent: tables.append(ent.table) if check_columns: def visit_column(column): @@ -147,7 +149,6 @@ def visit_column(column): return tables - def unwrap_order_by(clause): """Break up an 'order by' expression into individual column-expressions, without DESC/ASC/NULLS FIRST/NULLS LAST""" @@ -158,9 +159,13 @@ def unwrap_order_by(clause): t = stack.popleft() if isinstance(t, ColumnElement) and \ ( - not isinstance(t, UnaryExpression) or \ + not isinstance(t, UnaryExpression) or not operators.is_ordering_modifier(t.modifier) - ): + ): + if isinstance(t, _label_reference): + t = t.element + if isinstance(t, (_textual_label_reference)): + continue cols.add(t) else: for c in t.get_children(): @@ -168,6 +173,16 @@ def unwrap_order_by(clause): return cols +def unwrap_label_reference(element): + def replace(elem): + if isinstance(elem, (_label_reference, _textual_label_reference)): + return elem.element + + return visitors.replacement_traverse( + element, {}, replace + ) + + def clause_is_present(clause, search): """Given a target clause and a second to search within, return True if the target is plainly present in the search without any @@ -183,6 +198,7 @@ def clause_is_present(clause, search): else: return False + def surface_selectables(clause): stack = [clause] while stack: @@ -193,14 +209,16 @@ def surface_selectables(clause): elif isinstance(elem, FromGrouping): stack.append(elem.element) + def selectables_overlap(left, right): """Return True if left/right have some overlapping selectable""" return bool( - set(surface_selectables(left)).intersection( - surface_selectables(right) - ) - ) + set(surface_selectables(left)).intersection( + surface_selectables(right) + ) + ) + def bind_values(clause): """Return an ordered list of "bound" values in the given clause. @@ -236,26 +254,25 @@ class _repr_params(object): display to the given number of 'multi' parameter sets. """ + def __init__(self, params, batches): self.params = params self.batches = batches def __repr__(self): if isinstance(self.params, (list, tuple)) and \ - len(self.params) > self.batches and \ - isinstance(self.params[0], (list, dict, tuple)): + len(self.params) > self.batches and \ + isinstance(self.params[0], (list, dict, tuple)): msg = " ... displaying %i of %i total bound parameter sets ... " return ' '.join(( - repr(self.params[:self.batches - 2])[0:-1], - msg % (self.batches, len(self.params)), - repr(self.params[-2:])[1:] - )) + repr(self.params[:self.batches - 2])[0:-1], + msg % (self.batches, len(self.params)), + repr(self.params[-2:])[1:] + )) else: return repr(self.params) - - def adapt_criterion_to_null(crit, nulls): """given criterion containing bind params, convert selected elements to IS NULL. 
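# --- editor's note: illustrative sketch only, not part of the patch above ---
# adapt_criterion_to_null() (an internal helper in sqlalchemy.sql.util, used
# by the ORM) rewrites "col = :param" comparisons into "col IS NULL" for the
# bind-parameter keys named in `nulls`.  The table, column and parameter
# names below are hypothetical, for demonstration only.
from sqlalchemy.sql import table, column, bindparam
from sqlalchemy.sql.util import adapt_criterion_to_null

t = table("t", column("x"), column("y"))
crit = (t.c.x == bindparam("xval")) & (t.c.y == bindparam("yval"))

# Treat the "xval" parameter as NULL; the "yval" comparison is left alone.
adapted = adapt_criterion_to_null(crit, {"xval"})
print(adapted)   # t.x IS NULL AND t.y = :yval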
@@ -264,14 +281,14 @@ def adapt_criterion_to_null(crit, nulls): def visit_binary(binary): if isinstance(binary.left, BindParameter) \ - and binary.left._identifying_key in nulls: + and binary.left._identifying_key in nulls: # reverse order if the NULL is on the left side binary.left = binary.right binary.right = Null() binary.operator = operators.is_ binary.negate = operators.isnot elif isinstance(binary.right, BindParameter) \ - and binary.right._identifying_key in nulls: + and binary.right._identifying_key in nulls: binary.right = Null() binary.operator = operators.is_ binary.negate = operators.isnot @@ -319,9 +336,9 @@ def reduce_columns(columns, *clauses, **kw): \**kw may specify 'ignore_nonexistent_tables' to ignore foreign keys whose tables are not yet configured, or columns that aren't yet present. - This function is primarily used to determine the most minimal "primary key" - from a selectable, by reducing the set of primary key columns present - in the the selectable to just those that are not repeated. + This function is primarily used to determine the most minimal "primary + key" from a selectable, by reducing the set of primary key columns present + in the selectable to just those that are not repeated. """ ignore_nonexistent_tables = kw.pop('ignore_nonexistent_tables', False) @@ -352,21 +369,21 @@ def reduce_columns(columns, *clauses, **kw): else: raise if fk_col.shares_lineage(c) and \ - (not only_synonyms or \ - c.name == col.name): + (not only_synonyms or + c.name == col.name): omit.add(col) break if clauses: def visit_binary(binary): if binary.operator == operators.eq: - cols = util.column_set(chain(*[c.proxy_set - for c in columns.difference(omit)])) + cols = util.column_set( + chain(*[c.proxy_set for c in columns.difference(omit)])) if binary.left in cols and binary.right in cols: for c in reversed(columns): if c.shares_lineage(binary.right) and \ - (not only_synonyms or \ - c.name == binary.left.name): + (not only_synonyms or + c.name == binary.left.name): omit.add(c) break for clause in clauses: @@ -377,7 +394,7 @@ def visit_binary(binary): def criterion_as_pairs(expression, consider_as_foreign_keys=None, - consider_as_referenced_keys=None, any_operator=False): + consider_as_referenced_keys=None, any_operator=False): """traverse an expression and locate binary criterion pairs.""" if consider_as_foreign_keys and consider_as_referenced_keys: @@ -386,37 +403,37 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None, "'consider_as_referenced_keys'") def col_is(a, b): - #return a is b + # return a is b return a.compare(b) def visit_binary(binary): if not any_operator and binary.operator is not operators.eq: return if not isinstance(binary.left, ColumnElement) or \ - not isinstance(binary.right, ColumnElement): + not isinstance(binary.right, ColumnElement): return if consider_as_foreign_keys: if binary.left in consider_as_foreign_keys and \ - (col_is(binary.right, binary.left) or - binary.right not in consider_as_foreign_keys): + (col_is(binary.right, binary.left) or + binary.right not in consider_as_foreign_keys): pairs.append((binary.right, binary.left)) elif binary.right in consider_as_foreign_keys and \ - (col_is(binary.left, binary.right) or - binary.left not in consider_as_foreign_keys): + (col_is(binary.left, binary.right) or + binary.left not in consider_as_foreign_keys): pairs.append((binary.left, binary.right)) elif consider_as_referenced_keys: if binary.left in consider_as_referenced_keys and \ - (col_is(binary.right, binary.left) or - binary.right 
not in consider_as_referenced_keys): + (col_is(binary.right, binary.left) or + binary.right not in consider_as_referenced_keys): pairs.append((binary.left, binary.right)) elif binary.right in consider_as_referenced_keys and \ - (col_is(binary.left, binary.right) or - binary.left not in consider_as_referenced_keys): + (col_is(binary.left, binary.right) or + binary.left not in consider_as_referenced_keys): pairs.append((binary.right, binary.left)) else: if isinstance(binary.left, Column) and \ - isinstance(binary.right, Column): + isinstance(binary.right, Column): if binary.left.references(binary.right): pairs.append((binary.right, binary.left)) elif binary.right.references(binary.left): @@ -426,36 +443,6 @@ def visit_binary(binary): return pairs - -class AliasedRow(object): - """Wrap a RowProxy with a translation map. - - This object allows a set of keys to be translated - to those present in a RowProxy. - - """ - def __init__(self, row, map): - # AliasedRow objects don't nest, so un-nest - # if another AliasedRow was passed - if isinstance(row, AliasedRow): - self.row = row.row - else: - self.row = row - self.map = map - - def __contains__(self, key): - return self.map[key] in self.row - - def has_key(self, key): - return key in self - - def __getitem__(self, key): - return self.row[self.map[key]] - - def keys(self): - return self.row.keys() - - class ClauseAdapter(visitors.ReplacingCloningVisitor): """Clones and modifies clauses based on column correspondence. @@ -482,45 +469,38 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor): s.c.col1 == table2.c.col1 """ + def __init__(self, selectable, equivalents=None, - include=None, exclude=None, - include_fn=None, exclude_fn=None, - adapt_on_names=False): - self.__traverse_options__ = {'stop_on': [selectable]} + include_fn=None, exclude_fn=None, + adapt_on_names=False, anonymize_labels=False): + self.__traverse_options__ = { + 'stop_on': [selectable], + 'anonymize_labels': anonymize_labels} self.selectable = selectable - if include: - assert not include_fn - self.include_fn = lambda e: e in include - else: - self.include_fn = include_fn - if exclude: - assert not exclude_fn - self.exclude_fn = lambda e: e in exclude - else: - self.exclude_fn = exclude_fn + self.include_fn = include_fn + self.exclude_fn = exclude_fn self.equivalents = util.column_dict(equivalents or {}) self.adapt_on_names = adapt_on_names def _corresponding_column(self, col, require_embedded, _seen=util.EMPTY_SET): newcol = self.selectable.corresponding_column( - col, - require_embedded=require_embedded) + col, + require_embedded=require_embedded) if newcol is None and col in self.equivalents and col not in _seen: for equiv in self.equivalents[col]: - newcol = self._corresponding_column(equiv, - require_embedded=require_embedded, - _seen=_seen.union([col])) + newcol = self._corresponding_column( + equiv, require_embedded=require_embedded, + _seen=_seen.union([col])) if newcol is not None: return newcol if self.adapt_on_names and newcol is None: newcol = self.selectable.c.get(col.name) return newcol - magic_flag = False def replace(self, col): - if not self.magic_flag and isinstance(col, FromClause) and \ - self.selectable.is_derived_from(col): + if isinstance(col, FromClause) and \ + self.selectable.is_derived_from(col): return self.selectable elif not isinstance(col, ColumnElement): return None @@ -535,60 +515,102 @@ def replace(self, col): class ColumnAdapter(ClauseAdapter): """Extends ClauseAdapter with extra utility functions. 
- Provides the ability to "wrap" this ClauseAdapter - around another, a columns dictionary which returns - adapted elements given an original, and an - adapted_row() factory. + Key aspects of ColumnAdapter include: + + * Expressions that are adapted are stored in a persistent + .columns collection; so that an expression E adapted into + an expression E1, will return the same object E1 when adapted + a second time. This is important in particular for things like + Label objects that are anonymized, so that the ColumnAdapter can + be used to present a consistent "adapted" view of things. + + * Exclusion of items from the persistent collection based on + include/exclude rules, but also independent of hash identity. + This because "annotated" items all have the same hash identity as their + parent. + + * "wrapping" capability is added, so that the replacement of an expression + E can proceed through a series of adapters. This differs from the + visitor's "chaining" feature in that the resulting object is passed + through all replacing functions unconditionally, rather than stopping + at the first one that returns non-None. + + * An adapt_required option, used by eager loading to indicate that + We don't trust a result row column that is not translated. + This is to prevent a column from being interpreted as that + of the child row in a self-referential scenario, see + inheritance/test_basic.py->EagerTargetingTest.test_adapt_stringency """ + def __init__(self, selectable, equivalents=None, - chain_to=None, include=None, - exclude=None, adapt_required=False): - ClauseAdapter.__init__(self, selectable, equivalents, include, exclude) + chain_to=None, adapt_required=False, + include_fn=None, exclude_fn=None, + adapt_on_names=False, + allow_label_resolve=True, + anonymize_labels=False): + ClauseAdapter.__init__(self, selectable, equivalents, + include_fn=include_fn, exclude_fn=exclude_fn, + adapt_on_names=adapt_on_names, + anonymize_labels=anonymize_labels) + if chain_to: self.chain(chain_to) self.columns = util.populate_column_dict(self._locate_col) + if self.include_fn or self.exclude_fn: + self.columns = self._IncludeExcludeMapping(self, self.columns) self.adapt_required = adapt_required + self.allow_label_resolve = allow_label_resolve + self._wrap = None + + class _IncludeExcludeMapping(object): + def __init__(self, parent, columns): + self.parent = parent + self.columns = columns + + def __getitem__(self, key): + if ( + self.parent.include_fn and not self.parent.include_fn(key) + ) or ( + self.parent.exclude_fn and self.parent.exclude_fn(key) + ): + if self.parent._wrap: + return self.parent._wrap.columns[key] + else: + return key + return self.columns[key] def wrap(self, adapter): ac = self.__class__.__new__(self.__class__) - ac.__dict__ = self.__dict__.copy() - ac._locate_col = ac._wrap(ac._locate_col, adapter._locate_col) - ac.adapt_clause = ac._wrap(ac.adapt_clause, adapter.adapt_clause) - ac.adapt_list = ac._wrap(ac.adapt_list, adapter.adapt_list) + ac.__dict__.update(self.__dict__) + ac._wrap = adapter ac.columns = util.populate_column_dict(ac._locate_col) + if ac.include_fn or ac.exclude_fn: + ac.columns = self._IncludeExcludeMapping(ac, ac.columns) + return ac - adapt_clause = ClauseAdapter.traverse - adapt_list = ClauseAdapter.copy_and_process + def traverse(self, obj): + return self.columns[obj] - def _wrap(self, local, wrapped): - def locate(col): - col = local(col) - return wrapped(col) - return locate + adapt_clause = traverse + adapt_list = ClauseAdapter.copy_and_process def 
_locate_col(self, col): - c = self._corresponding_column(col, True) - if c is None: - c = self.adapt_clause(col) - - # anonymize labels in case they have a hardcoded name - if isinstance(c, Label): - c = c.label(None) - - # adapt_required used by eager loading to indicate that - # we don't trust a result row column that is not translated. - # this is to prevent a column from being interpreted as that - # of the child row in a self-referential scenario, see - # inheritance/test_basic.py->EagerTargetingTest.test_adapt_stringency + + c = ClauseAdapter.traverse(self, col) + + if self._wrap: + c2 = self._wrap._locate_col(c) + if c2 is not None: + c = c2 + if self.adapt_required and c is col: return None - return c + c._allow_label_resolve = self.allow_label_resolve - def adapted_row(self, row): - return AliasedRow(row, self.columns) + return c def __getstate__(self): d = self.__dict__.copy() @@ -598,4 +620,3 @@ def __getstate__(self): def __setstate__(self, state): self.__dict__.update(state) self.columns = util.PopulateDict(self._locate_col) - diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py index d9ad04fc06..d12213e353 100644 --- a/lib/sqlalchemy/sql/visitors.py +++ b/lib/sqlalchemy/sql/visitors.py @@ -1,5 +1,6 @@ # sql/visitors.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -29,10 +30,10 @@ from .. import exc __all__ = ['VisitableType', 'Visitable', 'ClauseVisitor', - 'CloningVisitor', 'ReplacingCloningVisitor', 'iterate', - 'iterate_depthfirst', 'traverse_using', 'traverse', - 'traverse_depthfirst', - 'cloned_traverse', 'replacement_traverse'] + 'CloningVisitor', 'ReplacingCloningVisitor', 'iterate', + 'iterate_depthfirst', 'traverse_using', 'traverse', + 'traverse_depthfirst', + 'cloned_traverse', 'replacement_traverse'] class VisitableType(type): @@ -50,6 +51,7 @@ def _compiler_dispatch (self, visitor, **kw): Classes having no __visit_name__ attribute will remain unaffected. """ + def __init__(cls, clsname, bases, clsdict): if clsname != 'Visitable' and \ hasattr(cls, '__visit_name__'): @@ -91,7 +93,7 @@ def _compiler_dispatch(self, visitor, **kw): return meth(self, **kw) _compiler_dispatch.__doc__ = \ - """Look for an attribute named "visit_" + self.__visit_name__ + """Look for an attribute named "visit_" + self.__visit_name__ on the visitor, and call it with the same kw params. """ cls._compiler_dispatch = _compiler_dispatch @@ -211,12 +213,19 @@ def iterate(obj, opts): traversal is configured to be breadth-first. """ + # fasttrack for atomic elements like columns + children = obj.get_children(**opts) + if not children: + return [obj] + + traversal = deque() stack = deque([obj]) while stack: t = stack.popleft() - yield t + traversal.append(t) for c in t.get_children(**opts): stack.append(c) + return iter(traversal) def iterate_depthfirst(obj, opts): @@ -225,6 +234,11 @@ def iterate_depthfirst(obj, opts): traversal is configured to be depth-first. 
""" + # fasttrack for atomic elements like columns + children = obj.get_children(**opts) + if not children: + return [obj] + stack = deque([obj]) traversal = deque() while stack: @@ -296,7 +310,7 @@ def replacement_traverse(obj, opts, replace): def clone(elem, **kw): if id(elem) in stop_on or \ - 'no_replacement_traverse' in elem._annotations: + 'no_replacement_traverse' in elem._annotations: return elem else: newelem = replace(elem) diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py index 9549064322..4e02227c31 100644 --- a/lib/sqlalchemy/testing/__init__.py +++ b/lib/sqlalchemy/testing/__init__.py @@ -1,32 +1,36 @@ # testing/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from .warnings import testing_warn, assert_warnings, resetwarnings +from .warnings import assert_warnings from . import config from .exclusions import db_spec, _is_excluded, fails_if, skip_if, future,\ fails_on, fails_on_everything_except, skip, only_on, exclude, \ - against as _against, _server_version, only_if + against as _against, _server_version, only_if, fails def against(*queries): return _against(config._current, *queries) from .assertions import emits_warning, emits_warning_on, uses_deprecated, \ - eq_, ne_, is_, is_not_, startswith_, assert_raises, \ - assert_raises_message, AssertsCompiledSQL, ComparesTables, \ - AssertsExecutionResults, expect_deprecated + eq_, ne_, le_, is_, is_not_, startswith_, assert_raises, \ + assert_raises_message, AssertsCompiledSQL, ComparesTables, \ + AssertsExecutionResults, expect_deprecated, expect_warnings, \ + in_, not_in_ -from .util import run_as_contextmanager, rowset, fail, provide_metadata, adict +from .util import run_as_contextmanager, rowset, fail, \ + provide_metadata, adict, force_drop_names, \ + teardown_events crashes = skip from .config import db from .config import requirements as requires -from . import mock \ No newline at end of file +from . import mock diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py index edc9df047b..492adcd62a 100644 --- a/lib/sqlalchemy/testing/assertions.py +++ b/lib/sqlalchemy/testing/assertions.py @@ -1,5 +1,6 @@ # testing/assertions.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,79 +9,88 @@ from . import util as testutil from sqlalchemy import pool, orm, util -from sqlalchemy.engine import default, create_engine, url -from sqlalchemy import exc as sa_exc +from sqlalchemy.engine import default, url from sqlalchemy.util import decorator -from sqlalchemy import types as sqltypes, schema +from sqlalchemy import types as sqltypes, schema, exc as sa_exc import warnings import re -from .warnings import resetwarnings from .exclusions import db_spec, _is_excluded from . import assertsql from . import config -import itertools from .util import fail import contextlib +from . import mock + + +def expect_warnings(*messages, **kw): + """Context manager which expects one or more warnings. + + With no arguments, squelches all SAWarnings emitted via + sqlalchemy.util.warn and sqlalchemy.util.warn_limited. 
Otherwise + pass string expressions that will match selected warnings via regex; + all non-matching warnings are sent through. + + The expect version **asserts** that the warnings were in fact seen. + + Note that the test suite sets SAWarning warnings to raise exceptions. + + """ + return _expect_warnings(sa_exc.SAWarning, messages, **kw) + + +@contextlib.contextmanager +def expect_warnings_on(db, *messages, **kw): + """Context manager which expects one or more warnings on specific + dialects. + + The expect version **asserts** that the warnings were in fact seen. + + """ + spec = db_spec(db) + + if isinstance(db, util.string_types) and not spec(config._current): + yield + else: + with expect_warnings(*messages, **kw): + yield def emits_warning(*messages): - """Mark a test as emitting a warning. + """Decorator form of expect_warnings(). + + Note that emits_warning does **not** assert that the warnings + were in fact seen. - With no arguments, squelches all SAWarning failures. Or pass one or more - strings; these will be matched to the root of the warning description by - warnings.filterwarnings(). """ - # TODO: it would be nice to assert that a named warning was - # emitted. should work with some monkeypatching of warnings, - # and may work on non-CPython if they keep to the spirit of - # warnings.showwarning's docstring. - # - update: jython looks ok, it uses cpython's module @decorator def decorate(fn, *args, **kw): - # todo: should probably be strict about this, too - filters = [dict(action='ignore', - category=sa_exc.SAPendingDeprecationWarning)] - if not messages: - filters.append(dict(action='ignore', - category=sa_exc.SAWarning)) - else: - filters.extend(dict(action='ignore', - message=message, - category=sa_exc.SAWarning) - for message in messages) - for f in filters: - warnings.filterwarnings(**f) - try: + with expect_warnings(assert_=False, *messages): return fn(*args, **kw) - finally: - resetwarnings() + return decorate -def emits_warning_on(db, *warnings): +def expect_deprecated(*messages, **kw): + return _expect_warnings(sa_exc.SADeprecationWarning, messages, **kw) + + +def emits_warning_on(db, *messages): """Mark a test as emitting a warning on a specific dialect. With no arguments, squelches all SAWarning failures. Or pass one or more strings; these will be matched to the root of the warning description by warnings.filterwarnings(). - """ - spec = db_spec(db) + Note that emits_warning_on does **not** assert that the warnings + were in fact seen. + + """ @decorator def decorate(fn, *args, **kw): - if isinstance(db, util.string_types): - if not spec(config._current): - return fn(*args, **kw) - else: - wrapped = emits_warning(*warnings)(fn) - return wrapped(*args, **kw) - else: - if not _is_excluded(*db): - return fn(*args, **kw) - else: - wrapped = emits_warning(*warnings)(fn) - return wrapped(*args, **kw) + with expect_warnings_on(db, assert_=False, *messages): + return fn(*args, **kw) + return decorate @@ -94,38 +104,52 @@ def uses_deprecated(*messages): As a special case, you may pass a function name prefixed with // and it will be re-written as needed to match the standard warning verbiage emitted by the sqlalchemy.util.deprecated decorator. + + Note that uses_deprecated does **not** assert that the warnings + were in fact seen. 
+ """ @decorator def decorate(fn, *args, **kw): - with expect_deprecated(*messages): + with expect_deprecated(*messages, assert_=False): return fn(*args, **kw) return decorate + @contextlib.contextmanager -def expect_deprecated(*messages): - # todo: should probably be strict about this, too - filters = [dict(action='ignore', - category=sa_exc.SAPendingDeprecationWarning)] - if not messages: - filters.append(dict(action='ignore', - category=sa_exc.SADeprecationWarning)) +def _expect_warnings(exc_cls, messages, regex=True, assert_=True): + + if regex: + filters = [re.compile(msg, re.I | re.S) for msg in messages] else: - filters.extend( - [dict(action='ignore', - message=message, - category=sa_exc.SADeprecationWarning) - for message in - [(m.startswith('//') and - ('Call to deprecated function ' + m[2:]) or m) - for m in messages]]) - - for f in filters: - warnings.filterwarnings(**f) - try: + filters = messages + + seen = set(filters) + + real_warn = warnings.warn + + def our_warn(msg, exception, *arg, **kw): + if not issubclass(exception, exc_cls): + return real_warn(msg, exception, *arg, **kw) + + if not filters: + return + + for filter_ in filters: + if (regex and filter_.match(msg)) or \ + (not regex and filter_ == msg): + seen.discard(filter_) + break + else: + real_warn(msg, exception, *arg, **kw) + + with mock.patch("warnings.warn", our_warn): yield - finally: - resetwarnings() + + if assert_: + assert not seen, "Warnings were not seen: %s" % \ + ", ".join("%r" % (s.pattern if regex else s) for s in seen) def global_cleanup_assertions(): @@ -139,6 +163,8 @@ def global_cleanup_assertions(): _assert_no_stray_pool_connections() _STRAY_CONNECTION_FAILURES = 0 + + def _assert_no_stray_pool_connections(): global _STRAY_CONNECTION_FAILURES @@ -150,12 +176,11 @@ def _assert_no_stray_pool_connections(): # there's a ref in there. usually just one. if pool._refs: - # OK, let's be somewhat forgiving. Increment a counter, - # we'll allow a couple of these at most. + # OK, let's be somewhat forgiving. _STRAY_CONNECTION_FAILURES += 1 print("Encountered a stray connection in test cleanup: %s" - % str(pool._refs)) + % str(pool._refs)) # then do a real GC sweep. We shouldn't even be here # so a single sweep should really be doing it, otherwise # there's probably a real unreachable cycle somewhere. @@ -164,7 +189,7 @@ def _assert_no_stray_pool_connections(): # if we've already had two of these occurrences, or # after a hard gc sweep we still have pool._refs?! # now we have to raise. - if _STRAY_CONNECTION_FAILURES >= 2 or pool._refs: + if pool._refs: err = str(pool._refs) # but clean out the pool refs collection directly, @@ -172,7 +197,11 @@ def _assert_no_stray_pool_connections(): # so the error doesn't at least keep happening. 
pool._refs.clear() _STRAY_CONNECTION_FAILURES = 0 - assert False, "Stray conections in cleanup: %s" % err + assert False, "Stray connection refused to leave "\ + "after gc.collect(): %s" % err + elif _STRAY_CONNECTION_FAILURES > 10: + assert False, "Encountered more than 10 stray connections" + _STRAY_CONNECTION_FAILURES = 0 def eq_(a, b, msg=None): @@ -185,6 +214,11 @@ def ne_(a, b, msg=None): assert a != b, msg or "%r == %r" % (a, b) +def le_(a, b, msg=None): + """Assert a <= b, with repr messaging on failure.""" + assert a <= b, msg or "%r != %r" % (a, b) + + def is_(a, b, msg=None): """Assert a is b, with repr messaging on failure.""" assert a is b, msg or "%r is not %r" % (a, b) @@ -195,6 +229,16 @@ def is_not_(a, b, msg=None): assert a is not b, msg or "%r is %r" % (a, b) +def in_(a, b, msg=None): + """Assert a in b, with repr messaging on failure.""" + assert a in b, msg or "%r not in %r" % (a, b) + + +def not_in_(a, b, msg=None): + """Assert a in not b, with repr messaging on failure.""" + assert a not in b, msg or "%r is in %r" % (a, b) + + def startswith_(a, fragment, msg=None): """Assert a.startswith(fragment), with repr messaging on failure.""" assert a.startswith(fragment), msg or "%r does not start with %r" % ( @@ -217,17 +261,19 @@ def assert_raises_message(except_cls, msg, callable_, *args, **kwargs): callable_(*args, **kwargs) assert False, "Callable did not raise an exception" except except_cls as e: - assert re.search(msg, util.text_type(e), re.UNICODE), "%r !~ %s" % (msg, e) + assert re.search( + msg, util.text_type(e), re.UNICODE), "%r !~ %s" % (msg, e) print(util.text_type(e).encode('utf-8')) class AssertsCompiledSQL(object): def assert_compile(self, clause, result, params=None, - checkparams=None, dialect=None, - checkpositional=None, - use_default_dialect=False, - allow_dialect_select=False, - literal_binds=False): + checkparams=None, dialect=None, + checkpositional=None, + check_prefetch=None, + use_default_dialect=False, + allow_dialect_select=False, + literal_binds=False): if use_default_dialect: dialect = default.DefaultDialect() elif allow_dialect_select: @@ -243,7 +289,6 @@ def assert_compile(self, clause, result, params=None, elif isinstance(dialect, util.string_types): dialect = url.URL(dialect).get_dialect()() - kw = {} compile_kwargs = {} @@ -267,10 +312,15 @@ def assert_compile(self, clause, result, params=None, if util.py3k: param_str = param_str.encode('utf-8').decode('ascii', 'ignore') - print(("\nSQL String:\n" + util.text_type(c) + param_str).encode('utf-8')) + print( + ("\nSQL String:\n" + + util.text_type(c) + + param_str).encode('utf-8')) else: - print("\nSQL String:\n" + util.text_type(c).encode('utf-8') + param_str) - + print( + "\nSQL String:\n" + + util.text_type(c).encode('utf-8') + + param_str) cc = re.sub(r'[\n\t]', '', util.text_type(c)) @@ -281,6 +331,8 @@ def assert_compile(self, clause, result, params=None, if checkpositional is not None: p = c.construct_params(params) eq_(tuple([p[x] for x in c.positiontup]), checkpositional) + if check_prefetch is not None: + eq_(c.prefetch, check_prefetch) class ComparesTables(object): @@ -295,7 +347,7 @@ def assert_tables_equal(self, table, reflected_table, strict_types=False): if strict_types: msg = "Type '%s' doesn't correspond to type '%s'" - assert type(reflected_c.type) is type(c.type), \ + assert isinstance(reflected_c.type, type(c.type)), \ msg % (reflected_c.type, c.type) else: self.assert_types_base(reflected_c, c) @@ -317,8 +369,8 @@ def assert_tables_equal(self, table, reflected_table, 
strict_types=False): def assert_types_base(self, c1, c2): assert c1.type._compare_type_affinity(c2.type),\ - "On column %r, type '%s' doesn't correspond to type '%s'" % \ - (c1.name, c1.type, c2.type) + "On column %r, type '%s' doesn't correspond to type '%s'" % \ + (c1.name, c1.type, c2.type) class AssertsExecutionResults(object): @@ -362,7 +414,8 @@ def __hash__(self): found = util.IdentitySet(result) expected = set([immutabledict(e) for e in expected]) - for wrong in util.itertools_filterfalse(lambda o: type(o) == cls, found): + for wrong in util.itertools_filterfalse(lambda o: + isinstance(o, cls), found): fail('Unexpected type "%s", expected "%s"' % ( type(wrong).__name__, cls.__name__)) @@ -393,31 +446,30 @@ def _compare_item(obj, spec): else: fail( "Expected %s instance with attributes %s not found." % ( - cls.__name__, repr(expected_item))) + cls.__name__, repr(expected_item))) return True + def sql_execution_asserter(self, db=None): + if db is None: + from . import db as db + + return assertsql.assert_engine(db) + def assert_sql_execution(self, db, callable_, *rules): - assertsql.asserter.add_rules(rules) - try: + with self.sql_execution_asserter(db) as asserter: callable_() - assertsql.asserter.statement_complete() - finally: - assertsql.asserter.clear_rules() + asserter.assert_(*rules) - def assert_sql(self, db, callable_, list_, with_sequences=None): - if with_sequences is not None and config.db.dialect.supports_sequences: - rules = with_sequences - else: - rules = list_ + def assert_sql(self, db, callable_, rules): newrules = [] for rule in rules: if isinstance(rule, dict): newrule = assertsql.AllOf(*[ - assertsql.ExactSQL(k, v) for k, v in rule.items() + assertsql.CompiledSQL(k, v) for k, v in rule.items() ]) else: - newrule = assertsql.ExactSQL(*rule) + newrule = assertsql.CompiledSQL(*rule) newrules.append(newrule) self.assert_sql_execution(db, callable_, *newrules) diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py index 3e0d4c9d3c..10f7ca754f 100644 --- a/lib/sqlalchemy/testing/assertsql.py +++ b/lib/sqlalchemy/testing/assertsql.py @@ -1,5 +1,6 @@ # testing/assertsql.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -7,84 +8,151 @@ from ..engine.default import DefaultDialect from .. import util import re +import collections +import contextlib +from .. import event +from sqlalchemy.schema import _DDLCompiles +from sqlalchemy.engine.util import _distill_params +from sqlalchemy.engine import url class AssertRule(object): - def process_execute(self, clauseelement, *multiparams, **params): - pass + is_consumed = False + errormessage = None + consume_statement = True - def process_cursor_execute(self, statement, parameters, context, - executemany): + def process_statement(self, execute_observed): pass - def is_consumed(self): - """Return True if this rule has been consumed, False if not. - - Should raise an AssertionError if this rule's condition has - definitely failed. 
- - """ - - raise NotImplementedError() - - def rule_passed(self): - """Return True if the last test of this rule passed, False if - failed, None if no test was applied.""" - - raise NotImplementedError() + def no_more_statements(self): + assert False, 'All statements are complete, but pending '\ + 'assertion rules remain' - def consume_final(self): - """Return True if this rule has been consumed. - Should raise an AssertionError if this rule's condition has not - been consumed or has failed. - - """ +class SQLMatchRule(AssertRule): + pass - if self._result is None: - assert False, 'Rule has not been consumed' - return self.is_consumed() +class CursorSQL(SQLMatchRule): + consume_statement = False -class SQLMatchRule(AssertRule): - def __init__(self): - self._result = None - self._errmsg = "" + def __init__(self, statement, params=None): + self.statement = statement + self.params = params - def rule_passed(self): - return self._result + def process_statement(self, execute_observed): + stmt = execute_observed.statements[0] + if self.statement != stmt.statement or ( + self.params is not None and self.params != stmt.parameters): + self.errormessage = \ + "Testing for exact SQL %s parameters %s received %s %s" % ( + self.statement, self.params, + stmt.statement, stmt.parameters + ) + else: + execute_observed.statements.pop(0) + self.is_consumed = True + if not execute_observed.statements: + self.consume_statement = True - def is_consumed(self): - if self._result is None: - return False - assert self._result, self._errmsg +class CompiledSQL(SQLMatchRule): - return True + def __init__(self, statement, params=None, dialect='default'): + self.statement = statement + self.params = params + self.dialect = dialect + def _compare_sql(self, execute_observed, received_statement): + stmt = re.sub(r'[\n\t]', '', self.statement) + return received_statement == stmt -class ExactSQL(SQLMatchRule): + def _compile_dialect(self, execute_observed): + if self.dialect == 'default': + return DefaultDialect() + else: + # ugh + if self.dialect == 'postgresql': + params = {'implicit_returning': True} + else: + params = {} + return url.URL(self.dialect).get_dialect()(**params) - def __init__(self, sql, params=None): - SQLMatchRule.__init__(self) - self.sql = sql - self.params = params + def _received_statement(self, execute_observed): + """reconstruct the statement and params in terms + of a target dialect, which for CompiledSQL is just DefaultDialect.""" - def process_cursor_execute(self, statement, parameters, context, - executemany): - if not context: - return - _received_statement = \ - _process_engine_statement(context.unicode_statement, - context) - _received_parameters = context.compiled_parameters + context = execute_observed.context + compare_dialect = self._compile_dialect(execute_observed) + if isinstance(context.compiled.statement, _DDLCompiles): + compiled = \ + context.compiled.statement.compile(dialect=compare_dialect) + else: + compiled = ( + context.compiled.statement.compile( + dialect=compare_dialect, + column_keys=context.compiled.column_keys, + inline=context.compiled.inline) + ) + _received_statement = re.sub(r'[\n\t]', '', util.text_type(compiled)) + parameters = execute_observed.parameters + + if not parameters: + _received_parameters = [compiled.construct_params()] + else: + _received_parameters = [ + compiled.construct_params(m) for m in parameters] + + return _received_statement, _received_parameters + + def process_statement(self, execute_observed): + context = execute_observed.context + + 
_received_statement, _received_parameters = \ + self._received_statement(execute_observed) + params = self._all_params(context) + + equivalent = self._compare_sql(execute_observed, _received_statement) + + if equivalent: + if params is not None: + all_params = list(params) + all_received = list(_received_parameters) + while all_params and all_received: + param = dict(all_params.pop(0)) + + for idx, received in enumerate(list(all_received)): + # do a positive compare only + for param_key in param: + # a key in param did not match current + # 'received' + if param_key not in received or \ + received[param_key] != param[param_key]: + break + else: + # all keys in param matched 'received'; + # onto next param + del all_received[idx] + break + else: + # param did not match any entry + # in all_received + equivalent = False + break + if all_params or all_received: + equivalent = False - # TODO: remove this step once all unit tests are migrated, as - # ExactSQL should really be *exact* SQL + if equivalent: + self.is_consumed = True + self.errormessage = None + else: + self.errormessage = self._failure_message(params) % { + 'received_statement': _received_statement, + 'received_parameters': _received_parameters + } - sql = _process_assertion_statement(self.sql, context) - equivalent = _received_statement == sql + def _all_params(self, context): if self.params: if util.callable(self.params): params = self.params(context) @@ -92,124 +160,85 @@ def process_cursor_execute(self, statement, parameters, context, params = self.params if not isinstance(params, list): params = [params] - equivalent = equivalent and params \ - == context.compiled_parameters + return params else: - params = {} - self._result = equivalent - if not self._result: - self._errmsg = \ - 'Testing for exact statement %r exact params %r, '\ - 'received %r with params %r' % (sql, params, - _received_statement, _received_parameters) + return None + def _failure_message(self, expected_params): + return ( + 'Testing for compiled statement %r partial params %r, ' + 'received %%(received_statement)r with params ' + '%%(received_parameters)r' % ( + self.statement.replace('%', '%%'), expected_params + ) + ) -class RegexSQL(SQLMatchRule): +class RegexSQL(CompiledSQL): def __init__(self, regex, params=None): SQLMatchRule.__init__(self) self.regex = re.compile(regex) self.orig_regex = regex self.params = params + self.dialect = 'default' - def process_cursor_execute(self, statement, parameters, context, - executemany): - if not context: - return - _received_statement = \ - _process_engine_statement(context.unicode_statement, - context) - _received_parameters = context.compiled_parameters - equivalent = bool(self.regex.match(_received_statement)) - if self.params: - if util.callable(self.params): - params = self.params(context) - else: - params = self.params - if not isinstance(params, list): - params = [params] + def _failure_message(self, expected_params): + return ( + 'Testing for compiled statement ~%r partial params %r, ' + 'received %%(received_statement)r with params ' + '%%(received_parameters)r' % ( + self.orig_regex, expected_params + ) + ) - # do a positive compare only + def _compare_sql(self, execute_observed, received_statement): + return bool(self.regex.match(received_statement)) - for param, received in zip(params, _received_parameters): - for k, v in param.items(): - if k not in received or received[k] != v: - equivalent = False - break - else: - params = {} - self._result = equivalent - if not self._result: - self._errmsg = \ - 
'Testing for regex %r partial params %r, received %r '\ - 'with params %r' % (self.orig_regex, params, - _received_statement, - _received_parameters) +class DialectSQL(CompiledSQL): + def _compile_dialect(self, execute_observed): + return execute_observed.context.dialect -class CompiledSQL(SQLMatchRule): + def _compare_no_space(self, real_stmt, received_stmt): + stmt = re.sub(r'[\n\t]', '', real_stmt) + return received_stmt == stmt - def __init__(self, statement, params=None): - SQLMatchRule.__init__(self) - self.statement = statement - self.params = params + def _received_statement(self, execute_observed): + received_stmt, received_params = super(DialectSQL, self).\ + _received_statement(execute_observed) - def process_cursor_execute(self, statement, parameters, context, - executemany): - if not context: - return - from sqlalchemy.schema import _DDLCompiles - _received_parameters = list(context.compiled_parameters) - - # recompile from the context, using the default dialect - - if isinstance(context.compiled.statement, _DDLCompiles): - compiled = \ - context.compiled.statement.compile(dialect=DefaultDialect()) + # TODO: why do we need this part? + for real_stmt in execute_observed.statements: + if self._compare_no_space(real_stmt.statement, received_stmt): + break else: - compiled = \ - context.compiled.statement.compile(dialect=DefaultDialect(), - column_keys=context.compiled.column_keys) - _received_statement = re.sub(r'[\n\t]', '', str(compiled)) - equivalent = self.statement == _received_statement - if self.params: - if util.callable(self.params): - params = self.params(context) - else: - params = self.params - if not isinstance(params, list): - params = [params] - else: - params = list(params) - all_params = list(params) - all_received = list(_received_parameters) - while params: - param = dict(params.pop(0)) - for k, v in context.compiled.params.items(): - param.setdefault(k, v) - if param not in _received_parameters: - equivalent = False - break - else: - _received_parameters.remove(param) - if _received_parameters: - equivalent = False + raise AssertionError( + "Can't locate compiled statement %r in list of " + "statements actually invoked" % received_stmt) + + return received_stmt, execute_observed.context.compiled_parameters + + def _compare_sql(self, execute_observed, received_statement): + stmt = re.sub(r'[\n\t]', '', self.statement) + # convert our comparison statement to have the + # paramstyle of the received + paramstyle = execute_observed.context.dialect.paramstyle + if paramstyle == 'pyformat': + stmt = re.sub( + r':([\w_]+)', r"%(\1)s", stmt) else: - params = {} - all_params = {} - all_received = [] - self._result = equivalent - if not self._result: - print('Testing for compiled statement %r partial params '\ - '%r, received %r with params %r' % (self.statement, - all_params, _received_statement, all_received)) - self._errmsg = \ - 'Testing for compiled statement %r partial params %r, '\ - 'received %r with params %r' % (self.statement, - all_params, _received_statement, all_received) - - - # print self._errmsg + # positional params + repl = None + if paramstyle == 'qmark': + repl = "?" 
+ elif paramstyle == 'format': + repl = r"%s" + elif paramstyle == 'numeric': + repl = None + stmt = re.sub(r':([\w_]+)', repl, stmt) + + return received_statement == stmt + class CountStatements(AssertRule): @@ -217,21 +246,13 @@ def __init__(self, count): self.count = count self._statement_count = 0 - def process_execute(self, clauseelement, *multiparams, **params): + def process_statement(self, execute_observed): self._statement_count += 1 - def process_cursor_execute(self, statement, parameters, context, - executemany): - pass - - def is_consumed(self): - return False - - def consume_final(self): - assert self.count == self._statement_count, \ - 'desired statement count %d does not match %d' \ - % (self.count, self._statement_count) - return True + def no_more_statements(self): + if self.count != self._statement_count: + assert False, 'desired statement count %d does not match %d' \ + % (self.count, self._statement_count) class AllOf(AssertRule): @@ -239,95 +260,113 @@ class AllOf(AssertRule): def __init__(self, *rules): self.rules = set(rules) - def process_execute(self, clauseelement, *multiparams, **params): - for rule in self.rules: - rule.process_execute(clauseelement, *multiparams, **params) + def process_statement(self, execute_observed): + for rule in list(self.rules): + rule.errormessage = None + rule.process_statement(execute_observed) + if rule.is_consumed: + self.rules.discard(rule) + if not self.rules: + self.is_consumed = True + break + elif not rule.errormessage: + # rule is not done yet + self.errormessage = None + break + else: + self.errormessage = list(self.rules)[0].errormessage + + +class Or(AllOf): - def process_cursor_execute(self, statement, parameters, context, - executemany): + def process_statement(self, execute_observed): for rule in self.rules: - rule.process_cursor_execute(statement, parameters, context, - executemany) + rule.process_statement(execute_observed) + if rule.is_consumed: + self.is_consumed = True + break + else: + self.errormessage = list(self.rules)[0].errormessage - def is_consumed(self): - if not self.rules: - return True - for rule in list(self.rules): - if rule.rule_passed(): # a rule passed, move on - self.rules.remove(rule) - return len(self.rules) == 0 - assert False, 'No assertion rules were satisfied for statement' - def consume_final(self): - return len(self.rules) == 0 +class SQLExecuteObserved(object): + def __init__(self, context, clauseelement, multiparams, params): + self.context = context + self.clauseelement = clauseelement + self.parameters = _distill_params(multiparams, params) + self.statements = [] -def _process_engine_statement(query, context): - if util.jython: +class SQLCursorExecuteObserved( + collections.namedtuple( + "SQLCursorExecuteObserved", + ["statement", "parameters", "context", "executemany"]) +): + pass - # oracle+zxjdbc passes a PyStatement when returning into - query = str(query) - if context.engine.name == 'mssql' \ - and query.endswith('; select scope_identity()'): - query = query[:-25] - query = re.sub(r'\n', '', query) - return query +class SQLAsserter(object): + def __init__(self): + self.accumulated = [] + def _close(self): + self._final = self.accumulated + del self.accumulated -def _process_assertion_statement(query, context): - paramstyle = context.dialect.paramstyle - if paramstyle == 'named': - pass - elif paramstyle == 'pyformat': - query = re.sub(r':([\w_]+)', r"%(\1)s", query) - else: - # positional params - repl = None - if paramstyle == 'qmark': - repl = "?" 
- elif paramstyle == 'format': - repl = r"%s" - elif paramstyle == 'numeric': - repl = None - query = re.sub(r':([\w_]+)', repl, query) + def assert_(self, *rules): + rules = list(rules) + observed = list(self._final) - return query + while observed and rules: + rule = rules[0] + rule.process_statement(observed[0]) + if rule.is_consumed: + rules.pop(0) + elif rule.errormessage: + assert False, rule.errormessage + if rule.consume_statement: + observed.pop(0) -class SQLAssert(object): + if not observed and rules: + rules[0].no_more_statements() + elif not rules and observed: + assert False, "Additional SQL statements remain" - rules = None - def add_rules(self, rules): - self.rules = list(rules) +@contextlib.contextmanager +def assert_engine(engine): + asserter = SQLAsserter() - def statement_complete(self): - for rule in self.rules: - if not rule.consume_final(): - assert False, \ - 'All statements are complete, but pending '\ - 'assertion rules remain' - - def clear_rules(self): - del self.rules - - def execute(self, conn, clauseelement, multiparams, params, result): - if self.rules is not None: - if not self.rules: - assert False, \ - 'All rules have been exhausted, but further '\ - 'statements remain' - rule = self.rules[0] - rule.process_execute(clauseelement, *multiparams, **params) - if rule.is_consumed(): - self.rules.pop(0) - - def cursor_execute(self, conn, cursor, statement, parameters, - context, executemany): - if self.rules: - rule = self.rules[0] - rule.process_cursor_execute(statement, parameters, context, - executemany) + orig = [] -asserter = SQLAssert() + @event.listens_for(engine, "before_execute") + def connection_execute(conn, clauseelement, multiparams, params): + # grab the original statement + params before any cursor + # execution + orig[:] = clauseelement, multiparams, params + + @event.listens_for(engine, "after_cursor_execute") + def cursor_execute(conn, cursor, statement, parameters, + context, executemany): + if not context: + return + # then grab real cursor statements and associate them all + # around a single context + if asserter.accumulated and \ + asserter.accumulated[-1].context is context: + obs = asserter.accumulated[-1] + else: + obs = SQLExecuteObserved(context, orig[0], orig[1], orig[2]) + asserter.accumulated.append(obs) + obs.statements.append( + SQLCursorExecuteObserved( + statement, parameters, context, executemany) + ) + + try: + yield asserter + finally: + event.remove(engine, "after_cursor_execute", cursor_execute) + event.remove(engine, "before_execute", connection_execute) + asserter._close() diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py index 20af3dd2e5..da5997661e 100644 --- a/lib/sqlalchemy/testing/config.py +++ b/lib/sqlalchemy/testing/config.py @@ -1,5 +1,6 @@ # testing/config.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -11,8 +12,11 @@ db_url = None db_opts = None file_config = None - +test_schema = None +test_schema_2 = None _current = None +_skip_test_exception = None + class Config(object): def __init__(self, db, db_opts, options, file_config): @@ -20,12 +24,14 @@ def __init__(self, db, db_opts, options, file_config): self.db_opts = db_opts self.options = options self.file_config = file_config + self.test_schema = "test_schema" + self.test_schema_2 = "test_schema_2" _stack 
= collections.deque() _configs = {} @classmethod - def register(cls, db, db_opts, options, file_config, namespace): + def register(cls, db, db_opts, options, file_config): """add a config as one of the global configs. If there are no configs set up yet, this config also @@ -33,25 +39,27 @@ def register(cls, db, db_opts, options, file_config, namespace): """ cfg = Config(db, db_opts, options, file_config) - global _current - if not _current: - cls.set_as_current(cfg, namespace) cls._configs[cfg.db.name] = cfg cls._configs[(cfg.db.name, cfg.db.dialect)] = cfg cls._configs[cfg.db] = cfg + return cfg @classmethod def set_as_current(cls, config, namespace): - global db, _current, db_url + global db, _current, db_url, test_schema, test_schema_2, db_opts _current = config db_url = config.db.url + db_opts = config.db_opts + test_schema = config.test_schema + test_schema_2 = config.test_schema_2 namespace.db = db = config.db @classmethod def push_engine(cls, db, namespace): assert _current, "Can't push without a default Config set up" cls.push( - Config(db, _current.db_opts, _current.options, _current.file_config), + Config( + db, _current.db_opts, _current.options, _current.file_config), namespace ) @@ -75,3 +83,10 @@ def all_configs(cls): def all_dbs(cls): for cfg in cls.all_configs(): yield cfg.db + + def skip_test(self, msg): + skip_test(msg) + + +def skip_test(msg): + raise _skip_test_exception(msg) diff --git a/lib/sqlalchemy/testing/distutils_run.py b/lib/sqlalchemy/testing/distutils_run.py new file mode 100644 index 0000000000..38de8872cb --- /dev/null +++ b/lib/sqlalchemy/testing/distutils_run.py @@ -0,0 +1,11 @@ +"""Quick and easy way to get setup.py test to run py.test without any +custom setuptools/distutils code. + +""" +import unittest +import pytest + + +class TestSuite(unittest.TestCase): + def test_sqlalchemy(self): + pytest.main(["-n", "4", "-q"]) diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py index d27be3cde0..def9f3c140 100644 --- a/lib/sqlalchemy/testing/engines.py +++ b/lib/sqlalchemy/testing/engines.py @@ -1,20 +1,19 @@ # testing/engines.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from __future__ import absolute_import -import types import weakref -from collections import deque from . import config from .util import decorator from .. import event, pool import re import warnings -from .. import util + class ConnectionKiller(object): @@ -38,12 +37,10 @@ def invalidate(self, dbapi_con, con_record, exception): def _safe(self, fn): try: fn() - except (SystemExit, KeyboardInterrupt): - raise except Exception as e: warnings.warn( - "testing_reaper couldn't " - "rollback/close connection: %s" % e) + "testing_reaper couldn't " + "rollback/close connection: %s" % e) def rollback_all(self): for rec in list(self.proxy_refs): @@ -57,7 +54,7 @@ def close_all(self): def _after_test_ctx(self): # this can cause a deadlock with pg8000 - pg8000 acquires - # prepared statment lock inside of rollback() - if async gc + # prepared statement lock inside of rollback() - if async gc # is collecting in finalize_fairy, deadlock. # not sure if this should be if pypy/jython only. 
# note that firebird/fdb definitely needs this though @@ -101,7 +98,14 @@ def drop_all_tables(metadata, bind): testing_reaper.close_all() if hasattr(bind, 'close'): bind.close() - metadata.drop_all(bind) + + if not config.db.dialect.supports_alter: + from . import assertions + with assertions.expect_warnings( + "Can't sort tables", assert_=False): + metadata.drop_all(bind) + else: + metadata.drop_all(bind) @decorator @@ -169,12 +173,10 @@ def connect(self, *args, **kwargs): def _safe(self, fn): try: fn() - except (SystemExit, KeyboardInterrupt): - raise except Exception as e: warnings.warn( - "ReconnectFixture couldn't " - "close connection: %s" % e) + "ReconnectFixture couldn't " + "close connection: %s" % e) def shutdown(self): # TODO: this doesn't cover all cases @@ -209,7 +211,7 @@ def testing_engine(url=None, options=None): """Produce an engine configured by --options with optional overrides.""" from sqlalchemy import create_engine - from .assertsql import asserter + from sqlalchemy.engine.url import make_url if not options: use_reaper = True @@ -217,15 +219,20 @@ def testing_engine(url=None, options=None): use_reaper = options.pop('use_reaper', True) url = url or config.db.url + + url = make_url(url) if options is None: - options = config.db_opts + if config.db is None or url.drivername == config.db.url.drivername: + options = config.db_opts + else: + options = {} engine = create_engine(url, **options) + engine._has_events = True # enable event blocks, helps with profiling + if isinstance(engine.pool, pool.QueuePool): engine.pool._timeout = 0 engine.pool._max_overflow = 0 - event.listen(engine, 'after_execute', asserter.execute) - event.listen(engine, 'after_cursor_execute', asserter.cursor_execute) if use_reaper: event.listen(engine.pool, 'connect', testing_reaper.connect) event.listen(engine.pool, 'checkout', testing_reaper.checkout) @@ -235,8 +242,6 @@ def testing_engine(url=None, options=None): return engine - - def mock_engine(dialect_name=None): """Provides a mocking engine based on the current testing.db. @@ -261,7 +266,7 @@ def executor(sql, *a, **kw): def assert_sql(stmts): recv = [re.sub(r'[\n\t]', '', str(s)) for s in buffer] - assert recv == stmts, recv + assert recv == stmts, recv def print_sql(): d = engine.dialect @@ -286,10 +291,11 @@ class DBAPIProxyCursor(object): DBAPI-level cursor operations. """ - def __init__(self, engine, conn): + + def __init__(self, engine, conn, *args, **kwargs): self.engine = engine self.connection = conn - self.cursor = conn.cursor() + self.cursor = conn.cursor(*args, **kwargs) def execute(self, stmt, parameters=None, **kw): if parameters: @@ -311,13 +317,14 @@ class DBAPIProxyConnection(object): DBAPI-level connection operations. """ + def __init__(self, engine, cursor_cls): self.conn = self._sqla_unwrap = engine.pool._creator() self.engine = engine self.cursor_cls = cursor_cls - def cursor(self): - return self.cursor_cls(self.engine, self.conn) + def cursor(self, *args, **kwargs): + return self.cursor_cls(self.engine, self.conn, *args, **kwargs) def close(self): self.conn.close() @@ -337,112 +344,3 @@ def mock_conn(): return testing_engine(options={'creator': mock_conn}) -class ReplayableSession(object): - """A simple record/playback tool. - - This is *not* a mock testing class. It only records a session for later - playback and makes no assertions on call consistency whatsoever. It's - unlikely to be suitable for anything other than DB-API recording. 
- - """ - - Callable = object() - NoAttribute = object() - - if util.py2k: - Natives = set([getattr(types, t) - for t in dir(types) if not t.startswith('_')]).\ - difference([getattr(types, t) - for t in ('FunctionType', 'BuiltinFunctionType', - 'MethodType', 'BuiltinMethodType', - 'LambdaType', 'UnboundMethodType',)]) - else: - Natives = set([getattr(types, t) - for t in dir(types) if not t.startswith('_')]).\ - union([type(t) if not isinstance(t, type) - else t for t in __builtins__.values()]).\ - difference([getattr(types, t) - for t in ('FunctionType', 'BuiltinFunctionType', - 'MethodType', 'BuiltinMethodType', - 'LambdaType', )]) - - def __init__(self): - self.buffer = deque() - - def recorder(self, base): - return self.Recorder(self.buffer, base) - - def player(self): - return self.Player(self.buffer) - - class Recorder(object): - def __init__(self, buffer, subject): - self._buffer = buffer - self._subject = subject - - def __call__(self, *args, **kw): - subject, buffer = [object.__getattribute__(self, x) - for x in ('_subject', '_buffer')] - - result = subject(*args, **kw) - if type(result) not in ReplayableSession.Natives: - buffer.append(ReplayableSession.Callable) - return type(self)(buffer, result) - else: - buffer.append(result) - return result - - @property - def _sqla_unwrap(self): - return self._subject - - def __getattribute__(self, key): - try: - return object.__getattribute__(self, key) - except AttributeError: - pass - - subject, buffer = [object.__getattribute__(self, x) - for x in ('_subject', '_buffer')] - try: - result = type(subject).__getattribute__(subject, key) - except AttributeError: - buffer.append(ReplayableSession.NoAttribute) - raise - else: - if type(result) not in ReplayableSession.Natives: - buffer.append(ReplayableSession.Callable) - return type(self)(buffer, result) - else: - buffer.append(result) - return result - - class Player(object): - def __init__(self, buffer): - self._buffer = buffer - - def __call__(self, *args, **kw): - buffer = object.__getattribute__(self, '_buffer') - result = buffer.popleft() - if result is ReplayableSession.Callable: - return self - else: - return result - - @property - def _sqla_unwrap(self): - return None - - def __getattribute__(self, key): - try: - return object.__getattribute__(self, key) - except AttributeError: - pass - buffer = object.__getattribute__(self, '_buffer') - result = buffer.popleft() - if result is ReplayableSession.Callable: - return self - elif result is ReplayableSession.NoAttribute: - raise AttributeError(key) - else: - return result diff --git a/lib/sqlalchemy/testing/entities.py b/lib/sqlalchemy/testing/entities.py index 9309abfd8d..a5d04decca 100644 --- a/lib/sqlalchemy/testing/entities.py +++ b/lib/sqlalchemy/testing/entities.py @@ -1,5 +1,6 @@ # testing/entities.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -57,7 +58,7 @@ def __eq__(self, other): _recursion_stack.add(id(self)) try: - # pick the entity thats not SA persisted as the source + # pick the entity that's not SA persisted as the source try: self_key = sa.orm.attributes.instance_state(self).key except sa.orm.exc.NO_STATE: @@ -85,7 +86,8 @@ def __eq__(self, other): return False if hasattr(value, '__iter__'): - if hasattr(value, '__getitem__') and not hasattr(value, 'keys'): + if hasattr(value, '__getitem__') and 
not hasattr( + value, 'keys'): if list(value) != list(battr): return False else: diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py index 00ca28428e..11c9e1b0ba 100644 --- a/lib/sqlalchemy/testing/exclusions.py +++ b/lib/sqlalchemy/testing/exclusions.py @@ -1,90 +1,167 @@ # testing/exclusions.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php import operator -from .plugin.plugin_base import SkipTest from ..util import decorator from . import config from .. import util -import contextlib import inspect +import contextlib +from sqlalchemy.util.compat import inspect_getargspec + + +def skip_if(predicate, reason=None): + rule = compound() + pred = _as_predicate(predicate, reason) + rule.skips.add(pred) + return rule -class skip_if(object): - def __init__(self, predicate, reason=None): - self.predicate = _as_predicate(predicate) - self.reason = reason - _fails_on = None +def fails_if(predicate, reason=None): + rule = compound() + pred = _as_predicate(predicate, reason) + rule.fails.add(pred) + return rule + + +class compound(object): + def __init__(self): + self.fails = set() + self.skips = set() + self.tags = set() def __add__(self, other): - def decorate(fn): - return other(self(fn)) - return decorate + return self.add(other) + + def add(self, *others): + copy = compound() + copy.fails.update(self.fails) + copy.skips.update(self.skips) + copy.tags.update(self.tags) + for other in others: + copy.fails.update(other.fails) + copy.skips.update(other.skips) + copy.tags.update(other.tags) + return copy + + def not_(self): + copy = compound() + copy.fails.update(NotPredicate(fail) for fail in self.fails) + copy.skips.update(NotPredicate(skip) for skip in self.skips) + copy.tags.update(self.tags) + return copy @property def enabled(self): return self.enabled_for_config(config._current) def enabled_for_config(self, config): - return not self.predicate(config) + for predicate in self.skips.union(self.fails): + if predicate(config): + return False + else: + return True + + def matching_config_reasons(self, config): + return [ + predicate._as_string(config) for predicate + in self.skips.union(self.fails) + if predicate(config) + ] + + def include_test(self, include_tags, exclude_tags): + return bool( + not self.tags.intersection(exclude_tags) and + (not include_tags or self.tags.intersection(include_tags)) + ) + + def _extend(self, other): + self.skips.update(other.skips) + self.fails.update(other.fails) + self.tags.update(other.tags) + + def __call__(self, fn): + if hasattr(fn, '_sa_exclusion_extend'): + fn._sa_exclusion_extend._extend(self) + return fn + + @decorator + def decorate(fn, *args, **kw): + return self._do(config._current, fn, *args, **kw) + decorated = decorate(fn) + decorated._sa_exclusion_extend = self + return decorated @contextlib.contextmanager - def fail_if(self, name='block'): + def fail_if(self): + all_fails = compound() + all_fails.fails.update(self.skips.union(self.fails)) + try: yield except Exception as ex: - if self.predicate(config._current): + all_fails._expect_failure(config._current, ex) + else: + all_fails._expect_success(config._current) + + def _do(self, config, fn, *args, **kw): + for skip in self.skips: + if skip(config): + msg = "'%s' : %s" % ( + fn.__name__, + skip._as_string(config) + ) + config.skip_test(msg) + + 
try: + return_value = fn(*args, **kw) + except Exception as ex: + self._expect_failure(config, ex, name=fn.__name__) + else: + self._expect_success(config, name=fn.__name__) + return return_value + + def _expect_failure(self, config, ex, name='block'): + for fail in self.fails: + if fail(config): print(("%s failed as expected (%s): %s " % ( - name, self.predicate, str(ex)))) - else: - raise + name, fail._as_string(config), str(ex)))) + break else: - if self.predicate(config._current): - raise AssertionError( - "Unexpected success for '%s' (%s)" % - (name, self.predicate)) + util.raise_from_cause(ex) + + def _expect_success(self, config, name='block'): + if not self.fails: + return + for fail in self.fails: + if not fail(config): + break + else: + raise AssertionError( + "Unexpected success for '%s' (%s)" % + ( + name, + " and ".join( + fail._as_string(config) + for fail in self.fails + ) + ) + ) - def __call__(self, fn): - @decorator - def decorate(fn, *args, **kw): - if self.predicate(config._current): - if self.reason: - msg = "'%s' : %s" % ( - fn.__name__, - self.reason - ) - else: - msg = "'%s': %s" % ( - fn.__name__, self.predicate - ) - raise SkipTest(msg) - else: - if self._fails_on: - with self._fails_on.fail_if(name=fn.__name__): - return fn(*args, **kw) - else: - return fn(*args, **kw) - return decorate(fn) - - def fails_on(self, other, reason=None): - self._fails_on = skip_if(other, reason) - return self - - def fails_on_everything_except(self, *dbs): - self._fails_on = skip_if(fails_on_everything_except(*dbs)) - return self - -class fails_if(skip_if): - def __call__(self, fn): - @decorator - def decorate(fn, *args, **kw): - with self.fail_if(name=fn.__name__): - return fn(*args, **kw) - return decorate(fn) + +def requires_tag(tagname): + return tags([tagname]) + + +def tags(tagnames): + comp = compound() + comp.tags.update(tagnames) + return comp def only_if(predicate, reason=None): @@ -99,13 +176,17 @@ def succeeds_if(predicate, reason=None): class Predicate(object): @classmethod - def as_predicate(cls, predicate): - if isinstance(predicate, skip_if): - return NotPredicate(predicate.predicate) + def as_predicate(cls, predicate, description=None): + if isinstance(predicate, compound): + return cls.as_predicate(predicate.enabled_for_config, description) elif isinstance(predicate, Predicate): + if description and predicate.description is None: + predicate.description = description return predicate - elif isinstance(predicate, list): - return OrPredicate([cls.as_predicate(pred) for pred in predicate]) + elif isinstance(predicate, (list, set)): + return OrPredicate( + [cls.as_predicate(pred) for pred in predicate], + description) elif isinstance(predicate, tuple): return SpecPredicate(*predicate) elif isinstance(predicate, util.string_types): @@ -116,12 +197,26 @@ def as_predicate(cls, predicate): op = tokens.pop(0) if tokens: spec = tuple(int(d) for d in tokens.pop(0).split(".")) - return SpecPredicate(db, op, spec) + return SpecPredicate(db, op, spec, description=description) elif util.callable(predicate): - return LambdaPredicate(predicate) + return LambdaPredicate(predicate, description) else: assert False, "unknown predicate type: %s" % predicate + def _format_description(self, config, negate=False): + bool_ = self(config) + if negate: + bool_ = not negate + return self.description % { + "driver": config.db.url.get_driver_name(), + "database": config.db.url.get_backend_name(), + "doesnt_support": "doesn't support" if bool_ else "does support", + "does_support": "does support" 
if bool_ else "doesn't support" + } + + def _as_string(self, config=None, negate=False): + raise NotImplementedError() + class BooleanPredicate(Predicate): def __init__(self, value, description=None): @@ -131,14 +226,8 @@ def __init__(self, value, description=None): def __call__(self, config): return self.value - def _as_string(self, negate=False): - if negate: - return "not " + self.description - else: - return self.description - - def __str__(self): - return self._as_string() + def _as_string(self, config, negate=False): + return self._format_description(config, negate=negate) class SpecPredicate(Predicate): @@ -149,15 +238,15 @@ def __init__(self, db, op=None, spec=None, description=None): self.description = description _ops = { - '<': operator.lt, - '>': operator.gt, - '==': operator.eq, - '!=': operator.ne, - '<=': operator.le, - '>=': operator.ge, - 'in': operator.contains, - 'between': lambda val, pair: val >= pair[0] and val <= pair[1], - } + '<': operator.lt, + '>': operator.gt, + '==': operator.eq, + '!=': operator.ne, + '<=': operator.le, + '>=': operator.ge, + 'in': operator.contains, + 'between': lambda val, pair: val >= pair[0] and val <= pair[1], + } def __call__(self, config): engine = config.db @@ -177,14 +266,14 @@ def __call__(self, config): version = _server_version(engine) oper = hasattr(self.op, '__call__') and self.op \ - or self._ops[self.op] + or self._ops[self.op] return oper(version, self.spec) else: return True - def _as_string(self, negate=False): + def _as_string(self, config, negate=False): if self.description is not None: - return self.description + return self._format_description(config) elif self.op is None: if negate: return "not %s" % self.db @@ -193,24 +282,21 @@ def _as_string(self, negate=False): else: if negate: return "not %s %s %s" % ( - self.db, - self.op, - self.spec - ) + self.db, + self.op, + self.spec + ) else: return "%s %s %s" % ( - self.db, - self.op, - self.spec - ) - - def __str__(self): - return self._as_string() + self.db, + self.op, + self.spec + ) class LambdaPredicate(Predicate): def __init__(self, lambda_, description=None, args=None, kw=None): - spec = inspect.getargspec(lambda_) + spec = inspect_getargspec(lambda_) if not spec[0]: self.lambda_ = lambda db: lambda_() else: @@ -227,25 +313,23 @@ def __init__(self, lambda_, description=None, args=None, kw=None): def __call__(self, config): return self.lambda_(config) - def _as_string(self, negate=False): - if negate: - return "not " + self.description - else: - return self.description - - def __str__(self): - return self._as_string() + def _as_string(self, config, negate=False): + return self._format_description(config) class NotPredicate(Predicate): - def __init__(self, predicate): + def __init__(self, predicate, description=None): self.predicate = predicate + self.description = description def __call__(self, config): return not self.predicate(config) - def __str__(self): - return self.predicate._as_string(True) + def _as_string(self, config, negate=False): + if self.description: + return self._format_description(config, not negate) + else: + return self.predicate._as_string(config, not negate) class OrPredicate(Predicate): @@ -256,40 +340,32 @@ def __init__(self, predicates, description=None): def __call__(self, config): for pred in self.predicates: if pred(config): - self._str = pred return True return False - _str = None - - def _eval_str(self, negate=False): - if self._str is None: - if negate: - conjunction = " and " - else: - conjunction = " or " - return 
conjunction.join(p._as_string(negate=negate) - for p in self.predicates) + def _eval_str(self, config, negate=False): + if negate: + conjunction = " and " else: - return self._str._as_string(negate=negate) + conjunction = " or " + return conjunction.join(p._as_string(config, negate=negate) + for p in self.predicates) - def _negation_str(self): + def _negation_str(self, config): if self.description is not None: - return "Not " + (self.description % {"spec": self._str}) + return "Not " + self._format_description(config) else: - return self._eval_str(negate=True) + return self._eval_str(config, negate=True) - def _as_string(self, negate=False): + def _as_string(self, config, negate=False): if negate: - return self._negation_str() + return self._negation_str(config) else: if self.description is not None: - return self.description % {"spec": self._str} + return self._format_description(config) else: - return self._eval_str() + return self._eval_str(config) - def __str__(self): - return self._as_string() _as_predicate = Predicate.as_predicate @@ -310,8 +386,8 @@ def _server_version(engine): def db_spec(*dbs): return OrPredicate( - [Predicate.as_predicate(db) for db in dbs] - ) + [Predicate.as_predicate(db) for db in dbs] + ) def open(): @@ -321,8 +397,10 @@ def open(): def closed(): return skip_if(BooleanPredicate(True, "marked as skip")) -def fails(): - return fails_if(BooleanPredicate(True, "expected to fail")) + +def fails(reason=None): + return fails_if(BooleanPredicate(True, reason or "expected to fail")) + @decorator def future(fn, *arg): @@ -335,10 +413,10 @@ def fails_on(db, reason=None): def fails_on_everything_except(*dbs): return succeeds_if( - OrPredicate([ - SpecPredicate(db) for db in dbs - ]) - ) + OrPredicate([ + SpecPredicate(db) for db in dbs + ]) + ) def skip(db, reason=None): @@ -347,7 +425,7 @@ def skip(db, reason=None): def only_on(dbs, reason=None): return only_if( - OrPredicate([SpecPredicate(db) for db in util.to_list(dbs)]) + OrPredicate([Predicate.as_predicate(db) for db in util.to_list(dbs)]) ) @@ -358,6 +436,6 @@ def exclude(db, op, spec, reason=None): def against(config, *queries): assert queries, "no queries sent!" return OrPredicate([ - Predicate.as_predicate(query) - for query in queries - ])(config) + Predicate.as_predicate(query) + for query in queries + ])(config) diff --git a/lib/sqlalchemy/testing/fixtures.py b/lib/sqlalchemy/testing/fixtures.py index 7941bf0f82..d6712b4615 100644 --- a/lib/sqlalchemy/testing/fixtures.py +++ b/lib/sqlalchemy/testing/fixtures.py @@ -1,5 +1,6 @@ # testing/fixtures.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -17,6 +18,7 @@ # whether or not we use unittest changes things dramatically, # as far as how py.test collection works. + class TestBase(object): # A sequence of database names to always run, regardless of the # constraints below. 
@@ -48,6 +50,7 @@ def teardown(self): if hasattr(self, "tearDown"): self.tearDown() + class TablesTest(TestBase): # 'once', None @@ -88,20 +91,12 @@ def _init_class(cls): cls.run_create_tables = 'each' assert cls.run_inserts in ('each', None) - if cls.other is None: - cls.other = adict() - - if cls.tables is None: - cls.tables = adict() - - if cls.bind is None: - setattr(cls, 'bind', cls.setup_bind()) - - if cls.metadata is None: - setattr(cls, 'metadata', sa.MetaData()) + cls.other = adict() + cls.tables = adict() - if cls.metadata.bind is None: - cls.metadata.bind = cls.bind + cls.bind = cls.setup_bind() + cls.metadata = sa.MetaData() + cls.metadata.bind = cls.bind @classmethod def _setup_once_inserts(cls): @@ -139,13 +134,14 @@ def _setup_each_inserts(self): def _teardown_each_tables(self): # no need to run deletes if tables are recreated on setup if self.run_define_tables != 'each' and self.run_deletes == 'each': - for table in reversed(self.metadata.sorted_tables): - try: - table.delete().execute().close() - except sa.exc.DBAPIError as ex: - util.print_( - ("Error emptying table %s: %r" % (table, ex)), - file=sys.stderr) + with self.bind.connect() as conn: + for table in reversed(self.metadata.sorted_tables): + try: + conn.execute(table.delete()) + except sa.exc.DBAPIError as ex: + util.print_( + ("Error emptying table %s: %r" % (table, ex)), + file=sys.stderr) def setup(self): self._setup_each_tables() @@ -197,9 +193,8 @@ def insert_data(cls): def sql_count_(self, count, fn): self.assert_sql_count(self.bind, fn, count) - def sql_eq_(self, callable_, statements, with_sequences=None): - self.assert_sql(self.bind, - callable_, statements, with_sequences) + def sql_eq_(self, callable_, statements): + self.assert_sql(self.bind, callable_, statements) @classmethod def _load_fixtures(cls): @@ -221,6 +216,8 @@ def _load_fixtures(cls): for column_values in rows[table]]) from sqlalchemy import event + + class RemovesEvents(object): @util.memoized_property def _event_fns(self): @@ -238,7 +235,6 @@ def teardown(self): super_.teardown() - class _ORMTest(object): @classmethod @@ -279,12 +275,14 @@ def teardown_class(cls): def setup(self): self._setup_each_tables() + self._setup_each_classes() self._setup_each_mappers() self._setup_each_inserts() def teardown(self): sa.orm.session.Session.close_all() self._teardown_each_mappers() + self._teardown_each_classes() self._teardown_each_tables() @classmethod @@ -306,6 +304,10 @@ def _setup_each_mappers(self): if self.run_setup_mappers == 'each': self._with_register_classes(self.setup_mappers) + def _setup_each_classes(self): + if self.run_setup_classes == 'each': + self._with_register_classes(self.setup_classes) + @classmethod def _with_register_classes(cls, fn): """Run a setup method, framing the operation with a Base class @@ -340,6 +342,10 @@ def _teardown_each_mappers(self): if self.run_setup_mappers != 'once': sa.orm.clear_mappers() + def _teardown_each_classes(self): + if self.run_setup_classes != 'once': + self.classes.clear() + @classmethod def setup_classes(cls): pass @@ -365,14 +371,14 @@ class FindFixtureDeclarative(DeclarativeMeta): def __init__(cls, classname, bases, dict_): cls_registry[classname] = cls return DeclarativeMeta.__init__( - cls, classname, bases, dict_) + cls, classname, bases, dict_) class DeclarativeBasic(object): __table_cls__ = schema.Table _DeclBase = declarative_base(metadata=cls.metadata, - metaclass=FindFixtureDeclarative, - cls=DeclarativeBasic) + metaclass=FindFixtureDeclarative, + cls=DeclarativeBasic) 
cls.DeclarativeBasic = _DeclBase fn() diff --git a/lib/sqlalchemy/testing/mock.py b/lib/sqlalchemy/testing/mock.py index 18ba053ea5..674f0851ac 100644 --- a/lib/sqlalchemy/testing/mock.py +++ b/lib/sqlalchemy/testing/mock.py @@ -1,5 +1,6 @@ # testing/mock.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -10,12 +11,11 @@ from ..util import py33 if py33: - from unittest.mock import MagicMock, Mock, call, patch + from unittest.mock import MagicMock, Mock, call, patch, ANY else: try: - from mock import MagicMock, Mock, call, patch + from mock import MagicMock, Mock, call, patch, ANY except ImportError: raise ImportError( - "SQLAlchemy's test suite requires the " - "'mock' library as of 0.8.2.") - + "SQLAlchemy's test suite requires the " + "'mock' library as of 0.8.2.") diff --git a/lib/sqlalchemy/testing/pickleable.py b/lib/sqlalchemy/testing/pickleable.py index 9a41034bff..d6814a13af 100644 --- a/lib/sqlalchemy/testing/pickleable.py +++ b/lib/sqlalchemy/testing/pickleable.py @@ -1,5 +1,6 @@ # testing/pickleable.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -62,8 +63,8 @@ def __init__(self, moredata): def __eq__(self, other): return other.data == self.data and \ - other.stuff == self.stuff and \ - other.moredata == self.moredata + other.stuff == self.stuff and \ + other.moredata == self.moredata class Bar(object): diff --git a/lib/sqlalchemy/testing/plugin/bootstrap.py b/lib/sqlalchemy/testing/plugin/bootstrap.py new file mode 100644 index 0000000000..497fcb7e58 --- /dev/null +++ b/lib/sqlalchemy/testing/plugin/bootstrap.py @@ -0,0 +1,44 @@ +""" +Bootstrapper for nose/pytest plugins. + +The entire rationale for this system is to get the modules in plugin/ +imported without importing all of the supporting library, so that we can +set up things for testing before coverage starts. + +The rationale for all of plugin/ being *in* the supporting library in the +first place is so that the testing and plugin suite is available to other +libraries, mainly external SQLAlchemy and Alembic dialects, to make use +of the same test environment and standard suites available to +SQLAlchemy/Alembic themselves without the need to ship/install a separate +package outside of SQLAlchemy. + +NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; +this should be removable when Alembic targets SQLAlchemy 1.0.0. 
+ +""" + +import os +import sys + +bootstrap_file = locals()['bootstrap_file'] +to_bootstrap = locals()['to_bootstrap'] + + +def load_file_as_module(name): + path = os.path.join(os.path.dirname(bootstrap_file), "%s.py" % name) + if sys.version_info >= (3, 3): + from importlib import machinery + mod = machinery.SourceFileLoader(name, path).load_module() + else: + import imp + mod = imp.load_source(name, path) + return mod + +if to_bootstrap == "pytest": + sys.modules["sqla_plugin_base"] = load_file_as_module("plugin_base") + sys.modules["sqla_pytestplugin"] = load_file_as_module("pytestplugin") +elif to_bootstrap == "nose": + sys.modules["sqla_plugin_base"] = load_file_as_module("plugin_base") + sys.modules["sqla_noseplugin"] = load_file_as_module("noseplugin") +else: + raise Exception("unknown bootstrap: %s" % to_bootstrap) # noqa diff --git a/lib/sqlalchemy/testing/plugin/noseplugin.py b/lib/sqlalchemy/testing/plugin/noseplugin.py index 18a1178a60..9fc5848fbe 100644 --- a/lib/sqlalchemy/testing/plugin/noseplugin.py +++ b/lib/sqlalchemy/testing/plugin/noseplugin.py @@ -1,5 +1,6 @@ # plugin/noseplugin.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -11,16 +12,22 @@ """ +try: + # installed by bootstrap.py + import sqla_plugin_base as plugin_base +except ImportError: + # assume we're a package, use traditional import + from . import plugin_base + + import os +import sys from nose.plugins import Plugin +import nose fixtures = None -# no package imports yet! this prevents us from tripping coverage -# too soon. -import imp -path = os.path.join(os.path.dirname(__file__), "plugin_base.py") -plugin_base = imp.load_source("plugin_base", path) +py3k = sys.version_info >= (3, 0) class NoseSQLAlchemy(Plugin): @@ -50,40 +57,51 @@ def configure(self, options, conf): plugin_base.set_coverage_flag(options.enable_plugin_coverage) - global fixtures - from sqlalchemy.testing import fixtures + plugin_base.set_skip_test(nose.SkipTest) def begin(self): + global fixtures + from sqlalchemy.testing import fixtures # noqa + plugin_base.post_begin() def describeTest(self, test): return "" def wantFunction(self, fn): - if fn.__module__ is None: - return False - if fn.__module__.startswith('sqlalchemy.testing'): - return False + return False + + def wantMethod(self, fn): + if py3k: + if not hasattr(fn.__self__, 'cls'): + return False + cls = fn.__self__.cls + else: + cls = fn.im_class + return plugin_base.want_method(cls, fn) def wantClass(self, cls): return plugin_base.want_class(cls) def beforeTest(self, test): - plugin_base.before_test(test, - test.test.cls.__module__, - test.test.cls, test.test.method.__name__) + if not hasattr(test.test, 'cls'): + return + plugin_base.before_test( + test, + test.test.cls.__module__, + test.test.cls, test.test.method.__name__) def afterTest(self, test): plugin_base.after_test(test) def startContext(self, ctx): if not isinstance(ctx, type) \ - or not issubclass(ctx, fixtures.TestBase): + or not issubclass(ctx, fixtures.TestBase): return plugin_base.start_test_class(ctx) def stopContext(self, ctx): if not isinstance(ctx, type) \ - or not issubclass(ctx, fixtures.TestBase): + or not issubclass(ctx, fixtures.TestBase): return plugin_base.stop_test_class(ctx) diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py 
index 061848e271..1ba0a800f5 100644 --- a/lib/sqlalchemy/testing/plugin/plugin_base.py +++ b/lib/sqlalchemy/testing/plugin/plugin_base.py @@ -1,5 +1,6 @@ # plugin/plugin_base.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -7,18 +8,12 @@ """Testing extensions. this module is designed to work as a testing-framework-agnostic library, -so that we can continue to support nose and also begin adding new functionality -via py.test. +so that we can continue to support nose and also begin adding new +functionality via py.test. """ from __future__ import absolute_import -try: - # unitttest has a SkipTest also but pytest doesn't - # honor it unless nose is imported too... - from nose import SkipTest -except ImportError: - from _pytest.runner import Skipped as SkipTest import sys import re @@ -30,7 +25,6 @@ else: import ConfigParser as configparser - # late imports fixtures = None engines = None @@ -46,53 +40,102 @@ logging = None -db_opts = {} +include_tags = set() +exclude_tags = set() options = None + def setup_options(make_option): make_option("--log-info", action="callback", type="string", callback=_log, - help="turn on info logging for (multiple OK)") - make_option("--log-debug", action="callback", type="string", callback=_log, - help="turn on debug logging for (multiple OK)") + help="turn on info logging for (multiple OK)") + make_option("--log-debug", action="callback", + type="string", callback=_log, + help="turn on debug logging for (multiple OK)") make_option("--db", action="append", type="string", dest="db", help="Use prefab database uri. Multiple OK, " - "first one is run by default.") + "first one is run by default.") make_option('--dbs', action='callback', callback=_list_dbs, - help="List available prefab dbs") + help="List available prefab dbs") make_option("--dburi", action="append", type="string", dest="dburi", - help="Database uri. Multiple OK, first one is run by default.") + help="Database uri. Multiple OK, " + "first one is run by default.") make_option("--dropfirst", action="store_true", dest="dropfirst", - help="Drop all tables in the target database first") + help="Drop all tables in the target database first") make_option("--backend-only", action="store_true", dest="backend_only", - help="Run only tests marked with __backend__") - make_option("--mockpool", action="store_true", dest="mockpool", - help="Use mock pool (asserts only one connection used)") - make_option("--low-connections", action="store_true", dest="low_connections", - help="Use a low number of distinct connections - i.e. for Oracle TNS" - ) - make_option("--reversetop", action="store_true", dest="reversetop", default=False, - help="Use a random-ordering set implementation in the ORM (helps " - "reveal dependency issues)") + help="Run only tests marked with __backend__") + make_option("--low-connections", action="store_true", + dest="low_connections", + help="Use a low number of distinct connections - " + "i.e. 
for Oracle TNS") + make_option("--reversetop", action="store_true", + dest="reversetop", default=False, + help="Use a random-ordering set implementation in the ORM " + "(helps reveal dependency issues)") make_option("--requirements", action="callback", type="string", - callback=_requirements_opt, - help="requirements class for testing, overrides setup.cfg") - make_option("--with-cdecimal", action="store_true", dest="cdecimal", default=False, - help="Monkeypatch the cdecimal library into Python 'decimal' for all tests") - make_option("--serverside", action="callback", callback=_server_side_cursors, - help="Turn on server side cursors for PG") - make_option("--mysql-engine", action="store", dest="mysql_engine", default=None, - help="Use the specified MySQL storage engine for all tables, default is " - "a db-default/InnoDB combo.") - make_option("--tableopts", action="append", dest="tableopts", default=[], - help="Add a dialect-specific table option, key=value") - make_option("--write-profiles", action="store_true", dest="write_profiles", default=False, - help="Write/update profiling data.") + callback=_requirements_opt, + help="requirements class for testing, overrides setup.cfg") + make_option("--with-cdecimal", action="store_true", + dest="cdecimal", default=False, + help="Monkeypatch the cdecimal library into Python 'decimal' " + "for all tests") + make_option("--include-tag", action="callback", callback=_include_tag, + type="string", + help="Include tests with tag ") + make_option("--exclude-tag", action="callback", callback=_exclude_tag, + type="string", + help="Exclude tests with tag ") + make_option("--write-profiles", action="store_true", + dest="write_profiles", default=False, + help="Write/update failing profiling data.") + make_option("--force-write-profiles", action="store_true", + dest="force_write_profiles", default=False, + help="Unconditionally write/update profiling data.") + + +def configure_follower(follower_ident): + """Configure required state for a follower. + + This invokes in the parent process and typically includes + database creation. + + """ + from sqlalchemy.testing import provision + provision.FOLLOWER_IDENT = follower_ident + + +def memoize_important_follower_config(dict_): + """Store important configuration we will need to send to a follower. + + This invokes in the parent process after normal config is set up. + + This is necessary as py.test seems to not be using forking, so we + start with nothing in memory, *but* it isn't running our argparse + callables, so we have to just copy all of that over. + + """ + dict_['memoized_config'] = { + 'include_tags': include_tags, + 'exclude_tags': exclude_tags + } + + +def restore_important_follower_config(dict_): + """Restore important configuration needed by a follower. + + This invokes in the follower process. 
+ + """ + global include_tags, exclude_tags + include_tags.update(dict_['memoized_config']['include_tags']) + exclude_tags.update(dict_['memoized_config']['exclude_tags']) + def read_config(): global file_config file_config = configparser.ConfigParser() file_config.read(['setup.cfg', 'test.cfg']) + def pre_begin(opt): """things to set up early, before coverage might be setup.""" global options @@ -100,9 +143,18 @@ def pre_begin(opt): for fn in pre_configure: fn(options, file_config) + def set_coverage_flag(value): options.has_coverage = value +_skip_test_exception = None + + +def set_skip_test(exc): + global _skip_test_exception + _skip_test_exception = exc + + def post_begin(): """things to set up later, once we know coverage is running.""" # Lazy setup of other options (post coverage) @@ -112,12 +164,14 @@ def post_begin(): # late imports, has to happen after config as well # as nose plugins like coverage global util, fixtures, engines, exclusions, \ - assertions, warnings, profiling,\ - config, testing - from sqlalchemy import testing - from sqlalchemy.testing import fixtures, engines, exclusions, \ - assertions, warnings, profiling, config - from sqlalchemy import util + assertions, warnings, profiling,\ + config, testing + from sqlalchemy import testing # noqa + from sqlalchemy.testing import fixtures, engines, exclusions # noqa + from sqlalchemy.testing import assertions, warnings, profiling # noqa + from sqlalchemy.testing import config # noqa + from sqlalchemy import util # noqa + warnings.setup_filters() def _log(opt_str, value, parser): @@ -139,13 +193,17 @@ def _list_dbs(*args): sys.exit(0) -def _server_side_cursors(opt_str, value, parser): - db_opts['server_side_cursors'] = True - def _requirements_opt(opt_str, value, parser): _setup_requirements(value) +def _exclude_tag(opt_str, value, parser): + exclude_tags.add(value.replace('-', '_')) + + +def _include_tag(opt_str, value, parser): + include_tags.add(value.replace('-', '_')) + pre_configure = [] post_configure = [] @@ -173,10 +231,18 @@ def _monkeypatch_cdecimal(options, file_config): sys.modules['decimal'] = cdecimal +@post +def _init_skiptest(options, file_config): + from sqlalchemy.testing import config + + config._skip_test_exception = _skip_test_exception + + @post def _engine_uri(options, file_config): - from sqlalchemy.testing import engines, config + from sqlalchemy.testing import config from sqlalchemy import testing + from sqlalchemy.testing import provision if options.dburi: db_urls = list(options.dburi) @@ -188,8 +254,9 @@ def _engine_uri(options, file_config): for db in re.split(r'[,\s]+', db_token): if db not in file_config.options('db'): raise RuntimeError( - "Unknown URI specifier '%s'. Specify --dbs for known uris." - % db) + "Unknown URI specifier '%s'. " + "Specify --dbs for known uris." 
+ % db) else: db_urls.append(file_config.get('db', db)) @@ -197,18 +264,12 @@ def _engine_uri(options, file_config): db_urls.append(file_config.get('db', 'default')) for db_url in db_urls: - eng = engines.testing_engine(db_url, db_opts) - eng.connect().close() - config.Config.register(eng, db_opts, options, file_config, testing) - - config.db_opts = db_opts + cfg = provision.setup_config( + db_url, options, file_config, provision.FOLLOWER_IDENT) + if not config._current: + cfg.set_as_current(cfg, testing) -@post -def _engine_pool(options, file_config): - if options.mockpool: - from sqlalchemy import pool - db_opts['poolclass'] = pool.AssertionPool @post def _requirements(options, file_config): @@ -216,6 +277,7 @@ def _requirements(options, file_config): requirement_cls = file_config.get('sqla_testing', "requirement_cls") _setup_requirements(requirement_cls) + def _setup_requirements(argument): from sqlalchemy.testing import config from sqlalchemy import testing @@ -234,9 +296,11 @@ def _setup_requirements(argument): config.requirements = testing.requires = req_cls() + @post def _prep_testing_database(options, file_config): - from sqlalchemy.testing import config + from sqlalchemy.testing import config, util + from sqlalchemy.testing.exclusions import against from sqlalchemy import schema, inspect if options.dropfirst: @@ -249,40 +313,35 @@ def _prep_testing_database(options, file_config): pass else: for vname in view_names: - e.execute(schema._DropView(schema.Table(vname, schema.MetaData()))) + e.execute(schema._DropView( + schema.Table(vname, schema.MetaData()) + )) if config.requirements.schemas.enabled_for_config(cfg): try: - view_names = inspector.get_view_names(schema="test_schema") + view_names = inspector.get_view_names( + schema="test_schema") except NotImplementedError: pass else: for vname in view_names: e.execute(schema._DropView( - schema.Table(vname, - schema.MetaData(), schema="test_schema"))) + schema.Table(vname, schema.MetaData(), + schema="test_schema") + )) - for tname in reversed(inspector.get_table_names(order_by="foreign_key")): - e.execute(schema.DropTable(schema.Table(tname, schema.MetaData()))) + util.drop_all_tables(e, inspector) if config.requirements.schemas.enabled_for_config(cfg): - for tname in reversed(inspector.get_table_names( - order_by="foreign_key", schema="test_schema")): - e.execute(schema.DropTable( - schema.Table(tname, schema.MetaData(), schema="test_schema"))) - - -@post -def _set_table_options(options, file_config): - from sqlalchemy.testing import schema + util.drop_all_tables(e, inspector, schema=cfg.test_schema) - table_options = schema.table_options - for spec in options.tableopts: - key, value = spec.split('=') - table_options[key] = value - - if options.mysql_engine: - table_options['mysql_engine'] = options.mysql_engine + if against(cfg, "postgresql"): + from sqlalchemy.dialects import postgresql + for enum in inspector.get_enums("*"): + e.execute(postgresql.DropEnumType( + postgresql.ENUM( + name=enum['name'], + schema=enum['schema']))) @post @@ -303,7 +362,7 @@ def _post_setup_options(opt, file_config): def _setup_profiling(options, file_config): from sqlalchemy.testing import profiling profiling._profile_stats = profiling.ProfileStatsFile( - file_config.get('sqla_testing', 'profile_file')) + file_config.get('sqla_testing', 'profile_file')) def want_class(cls): @@ -311,22 +370,48 @@ def want_class(cls): return False elif cls.__name__.startswith('_'): return False - elif config.options.backend_only and not getattr(cls, '__backend__', 
False): + elif config.options.backend_only and not getattr(cls, '__backend__', + False): return False else: return True + +def want_method(cls, fn): + if not fn.__name__.startswith("test_"): + return False + elif fn.__module__ is None: + return False + elif include_tags: + return ( + hasattr(cls, '__tags__') and + exclusions.tags(cls.__tags__).include_test( + include_tags, exclude_tags) + ) or ( + hasattr(fn, '_sa_exclusion_extend') and + fn._sa_exclusion_extend.include_test( + include_tags, exclude_tags) + ) + elif exclude_tags and hasattr(cls, '__tags__'): + return exclusions.tags(cls.__tags__).include_test( + include_tags, exclude_tags) + elif exclude_tags and hasattr(fn, '_sa_exclusion_extend'): + return fn._sa_exclusion_extend.include_test(include_tags, exclude_tags) + else: + return True + + def generate_sub_tests(cls, module): if getattr(cls, '__backend__', False): - for cfg in config.Config.all_configs(): + for cfg in _possible_configs_for_cls(cls): name = "%s_%s_%s" % (cls.__name__, cfg.db.name, cfg.db.driver) subcls = type( - name, - (cls, ), - { - "__only_on__": ("%s+%s" % (cfg.db.name, cfg.db.driver)), - "__backend__": False} - ) + name, + (cls, ), + { + "__only_on__": ("%s+%s" % (cfg.db.name, cfg.db.driver)), + } + ) setattr(module, name, subcls) yield subcls else: @@ -337,20 +422,28 @@ def start_test_class(cls): _do_skips(cls) _setup_engine(cls) + def stop_test_class(cls): + #from sqlalchemy import inspect + #assert not inspect(testing.db).get_table_names() engines.testing_reaper._stop_test_ctx() - if not options.low_connections: - assertions.global_cleanup_assertions() - _restore_engine() + try: + if not options.low_connections: + assertions.global_cleanup_assertions() + finally: + _restore_engine() + def _restore_engine(): config._current.reset(testing) + def _setup_engine(cls): if getattr(cls, '__engine_options__', None): eng = engines.testing_engine(options=cls.__engine_options__) config._current.push_engine(eng, testing) + def before_test(test, test_module_name, test_class, test_name): # like a nose id, e.g.: @@ -363,16 +456,27 @@ def before_test(test, test_module_name, test_class, test_name): id_ = "%s.%s.%s" % (test_module_name, name, test_name) - warnings.resetwarnings() profiling._current_test = id_ + def after_test(test): engines.testing_reaper._after_test_ctx() - warnings.resetwarnings() -def _do_skips(cls): + +def _possible_configs_for_cls(cls, reasons=None): all_configs = set(config.Config.all_configs()) - reasons = [] + + if cls.__unsupported_on__: + spec = exclusions.db_spec(*cls.__unsupported_on__) + for config_obj in list(all_configs): + if spec(config_obj): + all_configs.remove(config_obj) + + if getattr(cls, '__only_on__', None): + spec = exclusions.db_spec(*util.to_list(cls.__only_on__)) + for config_obj in list(all_configs): + if not spec(config_obj): + all_configs.remove(config_obj) if hasattr(cls, '__requires__'): requirements = config.requirements @@ -380,10 +484,11 @@ def _do_skips(cls): for requirement in cls.__requires__: check = getattr(requirements, requirement) - if check.predicate(config_obj): + skip_reasons = check.matching_config_reasons(config_obj) + if skip_reasons: all_configs.remove(config_obj) - if check.reason: - reasons.append(check.reason) + if reasons is not None: + reasons.extend(skip_reasons) break if hasattr(cls, '__prefer_requires__'): @@ -393,51 +498,45 @@ def _do_skips(cls): for requirement in cls.__prefer_requires__: check = getattr(requirements, requirement) - if check.predicate(config_obj): + if not 
check.enabled_for_config(config_obj): non_preferred.add(config_obj) if all_configs.difference(non_preferred): all_configs.difference_update(non_preferred) - if cls.__unsupported_on__: - spec = exclusions.db_spec(*cls.__unsupported_on__) - for config_obj in list(all_configs): - if spec(config_obj): - all_configs.remove(config_obj) + return all_configs - if getattr(cls, '__only_on__', None): - spec = exclusions.db_spec(*util.to_list(cls.__only_on__)) - for config_obj in list(all_configs): - if not spec(config_obj): - all_configs.remove(config_obj) +def _do_skips(cls): + reasons = [] + all_configs = _possible_configs_for_cls(cls, reasons) if getattr(cls, '__skip_if__', False): for c in getattr(cls, '__skip_if__'): if c(): - raise SkipTest("'%s' skipped by %s" % ( + config.skip_test("'%s' skipped by %s" % ( cls.__name__, c.__name__) ) - for db_spec, op, spec in getattr(cls, '__excluded_on__', ()): - for config_obj in list(all_configs): - if exclusions.skip_if( - exclusions.SpecPredicate(db_spec, op, spec) - ).predicate(config_obj): - all_configs.remove(config_obj) - - if not all_configs: - raise SkipTest( - "'%s' unsupported on DB implementation %s%s" % ( + if getattr(cls, '__backend__', False): + msg = "'%s' unsupported for implementation '%s'" % ( + cls.__name__, cls.__only_on__) + else: + msg = "'%s' unsupported on any DB implementation %s%s" % ( cls.__name__, - ", ".join("'%s' = %s" % ( - config_obj.db.name, - config_obj.db.dialect.server_version_info) - for config_obj in config.Config.all_configs() + ", ".join( + "'%s(%s)+%s'" % ( + config_obj.db.name, + ".".join( + str(dig) for dig in + config_obj.db.dialect.server_version_info), + config_obj.db.driver + ) + for config_obj in config.Config.all_configs() ), ", ".join(reasons) ) - ) + config.skip_test(msg) elif hasattr(cls, '__prefer_backends__'): non_preferred = set() spec = exclusions.db_spec(*util.to_list(cls.__prefer_backends__)) @@ -450,6 +549,6 @@ def _do_skips(cls): if config._current not in all_configs: _setup_config(all_configs.pop(), cls) + def _setup_config(config_obj, ctx): config._current.push(config_obj, testing) - diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py index 74d5cc0833..5bb6b966d5 100644 --- a/lib/sqlalchemy/testing/plugin/pytestplugin.py +++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py @@ -1,8 +1,22 @@ +try: + # installed by bootstrap.py + import sqla_plugin_base as plugin_base +except ImportError: + # assume we're a package, use traditional import + from . import plugin_base + import pytest import argparse import inspect -from . 
import plugin_base import collections +import itertools + +try: + import xdist # noqa + has_xdist = True +except ImportError: + has_xdist = False + def pytest_addoption(parser): group = parser.getgroup("sqlalchemy") @@ -11,7 +25,8 @@ def make_option(name, **kw): callback_ = kw.pop("callback", None) if callback_: class CallableAction(argparse.Action): - def __call__(self, parser, namespace, values, option_string=None): + def __call__(self, parser, namespace, + values, option_string=None): callback_(option_string, values, parser) kw["action"] = CallableAction @@ -20,13 +35,42 @@ def __call__(self, parser, namespace, values, option_string=None): plugin_base.setup_options(make_option) plugin_base.read_config() + def pytest_configure(config): + if hasattr(config, "slaveinput"): + plugin_base.restore_important_follower_config(config.slaveinput) + plugin_base.configure_follower( + config.slaveinput["follower_ident"] + ) + plugin_base.pre_begin(config.option) - plugin_base.set_coverage_flag(bool(getattr(config.option, "cov_source", False))) + plugin_base.set_coverage_flag(bool(getattr(config.option, + "cov_source", False))) + + plugin_base.set_skip_test(pytest.skip.Exception) + +def pytest_sessionstart(session): plugin_base.post_begin() +if has_xdist: + import uuid + + def pytest_configure_node(node): + # the master for each node fills slaveinput dictionary + # which pytest-xdist will transfer to the subprocess + + plugin_base.memoize_important_follower_config(node.slaveinput) + + node.slaveinput["follower_ident"] = "test_%s" % uuid.uuid4().hex[0:12] + from sqlalchemy.testing import provision + provision.create_follower_db(node.slaveinput["follower_ident"]) + + def pytest_testnodedown(node, error): + from sqlalchemy.testing import provision + provision.drop_follower_db(node.slaveinput["follower_ident"]) + def pytest_collection_modifyitems(session, config, items): # look for all those classes that specify __backend__ and @@ -40,14 +84,20 @@ def pytest_collection_modifyitems(session, config, items): # new classes to a module on the fly. rebuilt_items = collections.defaultdict(list) + items[:] = [ + item for item in + items if isinstance(item.parent, pytest.Instance) + and not item.parent.parent.name.startswith("_")] test_classes = set(item.parent for item in items) for test_class in test_classes: - for sub_cls in plugin_base.generate_sub_tests(test_class.cls, test_class.parent.module): + for sub_cls in plugin_base.generate_sub_tests( + test_class.cls, test_class.parent.module): if sub_cls is not test_class.cls: list_ = rebuilt_items[test_class.cls] - for inst in pytest.Class(sub_cls.__name__, - parent=test_class.parent.parent).collect(): + for inst in pytest.Class( + sub_cls.__name__, + parent=test_class.parent.parent).collect(): list_.extend(inst.collect()) newitems = [] @@ -61,27 +111,25 @@ def pytest_collection_modifyitems(session, config, items): # seems like the functions attached to a test class aren't sorted already? # is that true and why's that? 
(when using unittest, they're sorted) items[:] = sorted(newitems, key=lambda item: ( - item.parent.parent.parent.name, - item.parent.parent.name, - item.name - ) - ) - + item.parent.parent.parent.name, + item.parent.parent.name, + item.name + )) def pytest_pycollect_makeitem(collector, name, obj): - if inspect.isclass(obj) and plugin_base.want_class(obj): return pytest.Class(name, parent=collector) elif inspect.isfunction(obj) and \ - name.startswith("test_") and \ - isinstance(collector, pytest.Instance): + isinstance(collector, pytest.Instance) and \ + plugin_base.want_method(collector.cls, obj): return pytest.Function(name, parent=collector) else: return [] _current_class = None + def pytest_runtest_setup(item): # here we seem to get called only based on what we collected # in pytest_collection_modifyitems. So to do class-based stuff @@ -92,18 +140,22 @@ def pytest_runtest_setup(item): return # ... so we're doing a little dance here to figure it out... - if item.parent.parent is not _current_class: - + if _current_class is None: class_setup(item.parent.parent) _current_class = item.parent.parent # this is needed for the class-level, to ensure that the # teardown runs after the class is completed with its own # class-level teardown... - item.parent.parent.addfinalizer(lambda: class_teardown(item.parent.parent)) + def finalize(): + global _current_class + class_teardown(item.parent.parent) + _current_class = None + item.parent.parent.addfinalizer(finalize) test_setup(item) + def pytest_runtest_teardown(item): # ...but this works better as the hook here rather than # using a finalizer, as the finalizer seems to get in the way @@ -111,15 +163,19 @@ def pytest_runtest_teardown(item): # py.test assertion stuff instead) test_teardown(item) + def test_setup(item): - plugin_base.before_test(item, - item.parent.module.__name__, item.parent.cls, item.name) + plugin_base.before_test(item, item.parent.module.__name__, + item.parent.cls, item.name) + def test_teardown(item): plugin_base.after_test(item) + def class_setup(item): plugin_base.start_test_class(item.cls) + def class_teardown(item): plugin_base.stop_test_class(item.cls) diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py index 2f92527e94..a88cd21ad2 100644 --- a/lib/sqlalchemy/testing/profiling.py +++ b/lib/sqlalchemy/testing/profiling.py @@ -1,5 +1,6 @@ # testing/profiling.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -13,13 +14,11 @@ import os import sys -from .util import gc_collect, decorator +from .util import gc_collect from . import config -from .plugin.plugin_base import SkipTest import pstats -import time import collections -from .. import util +import contextlib try: import cProfile @@ -29,64 +28,8 @@ _current_test = None - -def profiled(target=None, **target_opts): - """Function profiling. - - @profiled() - or - @profiled(report=True, sort=('calls',), limit=20) - - Outputs profiling info for a decorated function. 
- - """ - - profile_config = {'targets': set(), - 'report': True, - 'print_callers': False, - 'print_callees': False, - 'graphic': False, - 'sort': ('time', 'calls'), - 'limit': None} - if target is None: - target = 'anonymous_target' - - @decorator - def decorate(fn, *args, **kw): - elapsed, load_stats, result = _profile( - fn, *args, **kw) - - graphic = target_opts.get('graphic', profile_config['graphic']) - if graphic: - os.system("runsnake %s" % filename) - else: - report = target_opts.get('report', profile_config['report']) - if report: - sort_ = target_opts.get('sort', profile_config['sort']) - limit = target_opts.get('limit', profile_config['limit']) - print(("Profile report for target '%s'" % ( - target, ) - )) - - stats = load_stats() - stats.sort_stats(*sort_) - if limit: - stats.print_stats(limit) - else: - stats.print_stats() - - print_callers = target_opts.get( - 'print_callers', profile_config['print_callers']) - if print_callers: - stats.print_callers() - - print_callees = target_opts.get( - 'print_callees', profile_config['print_callees']) - if print_callees: - stats.print_callees() - - return result - return decorate +# ProfileStatsFile instance, set up in plugin_base +_profile_stats = None class ProfileStatsFile(object): @@ -96,8 +39,13 @@ class ProfileStatsFile(object): so no json lib :( need to roll something silly """ + def __init__(self, filename): - self.write = ( + self.force_write = ( + config.options is not None and + config.options.force_write_profiles + ) + self.write = self.force_write or ( config.options is not None and config.options.write_profiles ) @@ -127,6 +75,11 @@ def platform_key(self): platform_tokens.append("pypy") if win32: platform_tokens.append("win") + platform_tokens.append( + "nativeunicode" + if config.db.dialect.convert_unicode + else "dbapiunicode" + ) _has_cext = config.requirements._has_cextensions() platform_tokens.append(_has_cext and "cextensions" or "nocextensions") return "_".join(platform_tokens) @@ -170,25 +123,32 @@ def replace(self, callcount): per_fn = self.data[test_key] per_platform = per_fn[self.platform_key] counts = per_platform['counts'] - counts[-1] = callcount + current_count = per_platform['current_count'] + if current_count < len(counts): + counts[current_count - 1] = callcount + else: + counts[-1] = callcount if self.write: self._write() def _header(self): - return \ - "# %s\n"\ - "# This file is written out on a per-environment basis.\n"\ - "# For each test in aaa_profiling, the corresponding function and \n"\ - "# environment is located within this file. If it doesn't exist,\n"\ - "# the test is skipped.\n"\ - "# If a callcount does exist, it is compared to what we received. \n"\ - "# assertions are raised if the counts do not match.\n"\ - "# \n"\ - "# To add a new callcount test, apply the function_call_count \n"\ - "# decorator and re-run the tests using the --write-profiles \n"\ - "# option - this file will be rewritten including the new count.\n"\ - "# \n"\ - "" % (self.fname) + return ( + "# %s\n" + "# This file is written out on a per-environment basis.\n" + "# For each test in aaa_profiling, the corresponding " + "function and \n" + "# environment is located within this file. " + "If it doesn't exist,\n" + "# the test is skipped.\n" + "# If a callcount does exist, it is compared " + "to what we received. 
\n" + "# assertions are raised if the counts do not match.\n" + "# \n" + "# To add a new callcount test, apply the function_call_count \n" + "# decorator and re-run the tests using the --write-profiles \n" + "# option - this file will be rewritten including the new count.\n" + "# \n" + ) % (self.fname) def _read(self): try: @@ -224,7 +184,6 @@ def _write(self): profile_f.close() - def function_call_count(variance=0.05): """Assert a target for a test case's function call count. @@ -238,72 +197,69 @@ def function_call_count(variance=0.05): def decorate(fn): def wrap(*args, **kw): - - if cProfile is None: - raise SkipTest("cProfile is not installed") - - if not _profile_stats.has_stats() and not _profile_stats.write: - # run the function anyway, to support dependent tests - # (not a great idea but we have these in test_zoomark) - fn(*args, **kw) - raise SkipTest("No profiling stats available on this " - "platform for this function. Run tests with " - "--write-profiles to add statistics to %s for " - "this platform." % _profile_stats.short_fname) - - gc_collect() - - timespent, load_stats, fn_result = _profile( - fn, *args, **kw - ) - stats = load_stats() - callcount = stats.total_calls - - expected = _profile_stats.result(callcount) - if expected is None: - expected_count = None - else: - line_no, expected_count = expected - - print(("Pstats calls: %d Expected %s" % ( - callcount, - expected_count - ) - )) - stats.print_stats() - #stats.print_callers() - - if expected_count: - deviance = int(callcount * variance) - failed = abs(callcount - expected_count) > deviance - - if failed: - if _profile_stats.write: - _profile_stats.replace(callcount) - else: - raise AssertionError( - "Adjusted function call count %s not within %s%% " - "of expected %s. Rerun with --write-profiles to " - "regenerate this callcount." - % ( - callcount, (variance * 100), - expected_count)) - return fn_result + with count_functions(variance=variance): + return fn(*args, **kw) return update_wrapper(wrap, fn) return decorate -def _profile(fn, *args, **kw): - filename = "%s.prof" % fn.__name__ +@contextlib.contextmanager +def count_functions(variance=0.05): + if cProfile is None: + raise SkipTest("cProfile is not installed") + + if not _profile_stats.has_stats() and not _profile_stats.write: + config.skip_test( + "No profiling stats available on this " + "platform for this function. Run tests with " + "--write-profiles to add statistics to %s for " + "this platform." % _profile_stats.short_fname) + + gc_collect() + + pr = cProfile.Profile() + pr.enable() + #began = time.time() + yield + #ended = time.time() + pr.disable() - def load_stats(): - st = pstats.Stats(filename) - os.unlink(filename) - return st + #s = compat.StringIO() + stats = pstats.Stats(pr, stream=sys.stdout) + + #timespent = ended - began + callcount = stats.total_calls + + expected = _profile_stats.result(callcount) + + if expected is None: + expected_count = None + else: + line_no, expected_count = expected + + print(("Pstats calls: %d Expected %s" % ( + callcount, + expected_count + ) + )) + stats.sort_stats("cumulative") + stats.print_stats() + + if expected_count: + deviance = int(callcount * variance) + failed = abs(callcount - expected_count) > deviance + + if failed or _profile_stats.force_write: + if _profile_stats.write: + _profile_stats.replace(callcount) + else: + raise AssertionError( + "Adjusted function call count %s not within %s%% " + "of expected %s, platform %s. Rerun with " + "--write-profiles to " + "regenerate this callcount." 
+ % ( + callcount, (variance * 100), + expected_count, _profile_stats.platform_key)) - began = time.time() - cProfile.runctx('result = fn(*args, **kw)', globals(), locals(), - filename=filename) - ended = time.time() - return ended - began, load_stats, locals()['result'] diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py new file mode 100644 index 0000000000..4bba1be670 --- /dev/null +++ b/lib/sqlalchemy/testing/provision.py @@ -0,0 +1,308 @@ +from sqlalchemy.engine import url as sa_url +from sqlalchemy import text +from sqlalchemy import exc +from sqlalchemy.util import compat +from . import config, engines +import time +import logging +import os +log = logging.getLogger(__name__) + +FOLLOWER_IDENT = None + + +class register(object): + def __init__(self): + self.fns = {} + + @classmethod + def init(cls, fn): + return register().for_db("*")(fn) + + def for_db(self, dbname): + def decorate(fn): + self.fns[dbname] = fn + return self + return decorate + + def __call__(self, cfg, *arg): + if isinstance(cfg, compat.string_types): + url = sa_url.make_url(cfg) + elif isinstance(cfg, sa_url.URL): + url = cfg + else: + url = cfg.db.url + backend = url.get_backend_name() + if backend in self.fns: + return self.fns[backend](cfg, *arg) + else: + return self.fns['*'](cfg, *arg) + + +def create_follower_db(follower_ident): + + for cfg in _configs_for_db_operation(): + _create_db(cfg, cfg.db, follower_ident) + + +def configure_follower(follower_ident): + for cfg in config.Config.all_configs(): + _configure_follower(cfg, follower_ident) + + +def setup_config(db_url, options, file_config, follower_ident): + if follower_ident: + db_url = _follower_url_from_main(db_url, follower_ident) + db_opts = {} + _update_db_opts(db_url, db_opts) + eng = engines.testing_engine(db_url, db_opts) + _post_configure_engine(db_url, eng, follower_ident) + eng.connect().close() + cfg = config.Config.register(eng, db_opts, options, file_config) + if follower_ident: + _configure_follower(cfg, follower_ident) + return cfg + + +def drop_follower_db(follower_ident): + for cfg in _configs_for_db_operation(): + _drop_db(cfg, cfg.db, follower_ident) + + +def _configs_for_db_operation(): + hosts = set() + + for cfg in config.Config.all_configs(): + cfg.db.dispose() + + for cfg in config.Config.all_configs(): + url = cfg.db.url + backend = url.get_backend_name() + host_conf = ( + backend, + url.username, url.host, url.database) + + if host_conf not in hosts: + yield cfg + hosts.add(host_conf) + + for cfg in config.Config.all_configs(): + cfg.db.dispose() + + +@register.init +def _create_db(cfg, eng, ident): + raise NotImplementedError("no DB creation routine for cfg: %s" % eng.url) + + +@register.init +def _drop_db(cfg, eng, ident): + raise NotImplementedError("no DB drop routine for cfg: %s" % eng.url) + + +@register.init +def _update_db_opts(db_url, db_opts): + pass + + +@register.init +def _configure_follower(cfg, ident): + pass + + +@register.init +def _post_configure_engine(url, engine, follower_ident): + pass + + +@register.init +def _follower_url_from_main(url, ident): + url = sa_url.make_url(url) + url.database = ident + return url + + +@_update_db_opts.for_db("mssql") +def _mssql_update_db_opts(db_url, db_opts): + db_opts['legacy_schema_aliasing'] = False + + +@_follower_url_from_main.for_db("sqlite") +def _sqlite_follower_url_from_main(url, ident): + url = sa_url.make_url(url) + if not url.database or url.database == ':memory:': + return url + else: + return sa_url.make_url("sqlite:///%s.db" 
% ident) + + +@_post_configure_engine.for_db("sqlite") +def _sqlite_post_configure_engine(url, engine, follower_ident): + from sqlalchemy import event + + @event.listens_for(engine, "connect") + def connect(dbapi_connection, connection_record): + # use file DBs in all cases, memory acts kind of strangely + # as an attached + if not follower_ident: + dbapi_connection.execute( + 'ATTACH DATABASE "test_schema.db" AS test_schema') + else: + dbapi_connection.execute( + 'ATTACH DATABASE "%s_test_schema.db" AS test_schema' + % follower_ident) + + +@_create_db.for_db("postgresql") +def _pg_create_db(cfg, eng, ident): + with eng.connect().execution_options( + isolation_level="AUTOCOMMIT") as conn: + try: + _pg_drop_db(cfg, conn, ident) + except Exception: + pass + currentdb = conn.scalar("select current_database()") + for attempt in range(3): + try: + conn.execute( + "CREATE DATABASE %s TEMPLATE %s" % (ident, currentdb)) + except exc.OperationalError as err: + if attempt != 2 and "accessed by other users" in str(err): + time.sleep(.2) + continue + else: + raise + else: + break + + +@_create_db.for_db("mysql") +def _mysql_create_db(cfg, eng, ident): + with eng.connect() as conn: + try: + _mysql_drop_db(cfg, conn, ident) + except Exception: + pass + conn.execute("CREATE DATABASE %s" % ident) + conn.execute("CREATE DATABASE %s_test_schema" % ident) + conn.execute("CREATE DATABASE %s_test_schema_2" % ident) + + +@_configure_follower.for_db("mysql") +def _mysql_configure_follower(config, ident): + config.test_schema = "%s_test_schema" % ident + config.test_schema_2 = "%s_test_schema_2" % ident + + +@_create_db.for_db("sqlite") +def _sqlite_create_db(cfg, eng, ident): + pass + + +@_drop_db.for_db("postgresql") +def _pg_drop_db(cfg, eng, ident): + with eng.connect().execution_options( + isolation_level="AUTOCOMMIT") as conn: + conn.execute( + text( + "select pg_terminate_backend(pid) from pg_stat_activity " + "where usename=current_user and pid != pg_backend_pid() " + "and datname=:dname" + ), dname=ident) + conn.execute("DROP DATABASE %s" % ident) + + +@_drop_db.for_db("sqlite") +def _sqlite_drop_db(cfg, eng, ident): + if ident: + os.remove("%s_test_schema.db" % ident) + else: + os.remove("%s.db" % ident) + + +@_drop_db.for_db("mysql") +def _mysql_drop_db(cfg, eng, ident): + with eng.connect() as conn: + conn.execute("DROP DATABASE %s_test_schema" % ident) + conn.execute("DROP DATABASE %s_test_schema_2" % ident) + conn.execute("DROP DATABASE %s" % ident) + + +@_create_db.for_db("oracle") +def _oracle_create_db(cfg, eng, ident): + # NOTE: make sure you've run "ALTER DATABASE default tablespace users" or + # similar, so that the default tablespace is not "system"; reflection will + # fail otherwise + with eng.connect() as conn: + conn.execute("create user %s identified by xe" % ident) + conn.execute("create user %s_ts1 identified by xe" % ident) + conn.execute("create user %s_ts2 identified by xe" % ident) + conn.execute("grant dba to %s" % (ident, )) + conn.execute("grant unlimited tablespace to %s" % ident) + conn.execute("grant unlimited tablespace to %s_ts1" % ident) + conn.execute("grant unlimited tablespace to %s_ts2" % ident) + +@_configure_follower.for_db("oracle") +def _oracle_configure_follower(config, ident): + config.test_schema = "%s_ts1" % ident + config.test_schema_2 = "%s_ts2" % ident + + +def _ora_drop_ignore(conn, dbname): + try: + conn.execute("drop user %s cascade" % dbname) + log.info("Reaped db: %s", dbname) + return True + except exc.DatabaseError as err: + log.warning("couldn't drop 
db: %s", err) + return False + + +@_drop_db.for_db("oracle") +def _oracle_drop_db(cfg, eng, ident): + with eng.connect() as conn: + # cx_Oracle seems to occasionally leak open connections when a large + # suite it run, even if we confirm we have zero references to + # connection objects. + # while there is a "kill session" command in Oracle, + # it unfortunately does not release the connection sufficiently. + _ora_drop_ignore(conn, ident) + _ora_drop_ignore(conn, "%s_ts1" % ident) + _ora_drop_ignore(conn, "%s_ts2" % ident) + + +def reap_oracle_dbs(eng): + log.info("Reaping Oracle dbs...") + with eng.connect() as conn: + to_reap = conn.execute( + "select u.username from all_users u where username " + "like 'TEST_%' and not exists (select username " + "from v$session where username=u.username)") + all_names = set([username.lower() for (username, ) in to_reap]) + to_drop = set() + for name in all_names: + if name.endswith("_ts1") or name.endswith("_ts2"): + continue + else: + to_drop.add(name) + if "%s_ts1" % name in all_names: + to_drop.add("%s_ts1" % name) + if "%s_ts2" % name in all_names: + to_drop.add("%s_ts2" % name) + + dropped = total = 0 + for total, username in enumerate(to_drop, 1): + if _ora_drop_ignore(conn, username): + dropped += 1 + log.info( + "Dropped %d out of %d stale databases detected", dropped, total) + + +@_follower_url_from_main.for_db("oracle") +def _oracle_follower_url_from_main(url, ident): + url = sa_url.make_url(url) + url.username = ident + url.password = 'xe' + return url + + diff --git a/lib/sqlalchemy/testing/replay_fixture.py b/lib/sqlalchemy/testing/replay_fixture.py new file mode 100644 index 0000000000..b50f52e3de --- /dev/null +++ b/lib/sqlalchemy/testing/replay_fixture.py @@ -0,0 +1,172 @@ +from . import fixtures +from . import profiling +from .. import util +import types +from collections import deque +import contextlib +from . import config +from sqlalchemy import MetaData +from sqlalchemy import create_engine +from sqlalchemy.orm import Session + + +class ReplayFixtureTest(fixtures.TestBase): + + @contextlib.contextmanager + def _dummy_ctx(self, *arg, **kw): + yield + + def test_invocation(self): + + dbapi_session = ReplayableSession() + creator = config.db.pool._creator + recorder = lambda: dbapi_session.recorder(creator()) + engine = create_engine( + config.db.url, creator=recorder, + use_native_hstore=False) + self.metadata = MetaData(engine) + self.engine = engine + self.session = Session(engine) + + self.setup_engine() + try: + self._run_steps(ctx=self._dummy_ctx) + finally: + self.teardown_engine() + engine.dispose() + + player = lambda: dbapi_session.player() + engine = create_engine( + config.db.url, creator=player, + use_native_hstore=False) + + self.metadata = MetaData(engine) + self.engine = engine + self.session = Session(engine) + + self.setup_engine() + try: + self._run_steps(ctx=profiling.count_functions) + finally: + self.session.close() + engine.dispose() + + def setup_engine(self): + pass + + def teardown_engine(self): + pass + + def _run_steps(self, ctx): + raise NotImplementedError() + + +class ReplayableSession(object): + """A simple record/playback tool. + + This is *not* a mock testing class. It only records a session for later + playback and makes no assertions on call consistency whatsoever. It's + unlikely to be suitable for anything other than DB-API recording. 
+ + """ + + Callable = object() + NoAttribute = object() + + if util.py2k: + Natives = set([getattr(types, t) + for t in dir(types) if not t.startswith('_')]).\ + difference([getattr(types, t) + for t in ('FunctionType', 'BuiltinFunctionType', + 'MethodType', 'BuiltinMethodType', + 'LambdaType', 'UnboundMethodType',)]) + else: + Natives = set([getattr(types, t) + for t in dir(types) if not t.startswith('_')]).\ + union([type(t) if not isinstance(t, type) + else t for t in __builtins__.values()]).\ + difference([getattr(types, t) + for t in ('FunctionType', 'BuiltinFunctionType', + 'MethodType', 'BuiltinMethodType', + 'LambdaType', )]) + + def __init__(self): + self.buffer = deque() + + def recorder(self, base): + return self.Recorder(self.buffer, base) + + def player(self): + return self.Player(self.buffer) + + class Recorder(object): + def __init__(self, buffer, subject): + self._buffer = buffer + self._subject = subject + + def __call__(self, *args, **kw): + subject, buffer = [object.__getattribute__(self, x) + for x in ('_subject', '_buffer')] + + result = subject(*args, **kw) + if type(result) not in ReplayableSession.Natives: + buffer.append(ReplayableSession.Callable) + return type(self)(buffer, result) + else: + buffer.append(result) + return result + + @property + def _sqla_unwrap(self): + return self._subject + + def __getattribute__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError: + pass + + subject, buffer = [object.__getattribute__(self, x) + for x in ('_subject', '_buffer')] + try: + result = type(subject).__getattribute__(subject, key) + except AttributeError: + buffer.append(ReplayableSession.NoAttribute) + raise + else: + if type(result) not in ReplayableSession.Natives: + buffer.append(ReplayableSession.Callable) + return type(self)(buffer, result) + else: + buffer.append(result) + return result + + class Player(object): + def __init__(self, buffer): + self._buffer = buffer + + def __call__(self, *args, **kw): + buffer = object.__getattribute__(self, '_buffer') + result = buffer.popleft() + if result is ReplayableSession.Callable: + return self + else: + return result + + @property + def _sqla_unwrap(self): + return None + + def __getattribute__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError: + pass + buffer = object.__getattribute__(self, '_buffer') + result = buffer.popleft() + if result is ReplayableSession.Callable: + return self + elif result is ReplayableSession.NoAttribute: + raise AttributeError(key) + else: + return result diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index 07b5697e2f..5d31c4d6fa 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -1,5 +1,6 @@ # testing/requirements.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -10,16 +11,18 @@ target database. External dialect test suites should subclass SuiteRequirements -to provide specific inclusion/exlusions. +to provide specific inclusion/exclusions. """ from . import exclusions +from .. 
import util class Requirements(object): pass + class SuiteRequirements(Requirements): @property @@ -63,9 +66,9 @@ def on_update_or_deferrable_fks(self): # somehow only_if([x, y]) isn't working here, negation/conjunctions # getting confused. return exclusions.only_if( - lambda: self.on_update_cascade.enabled or self.deferrable_fks.enabled - ) - + lambda: self.on_update_cascade.enabled or + self.deferrable_fks.enabled + ) @property def self_referential_foreign_keys(self): @@ -93,7 +96,17 @@ def subqueries(self): @property def offset(self): - """target database can render OFFSET, or an equivalent, in a SELECT.""" + """target database can render OFFSET, or an equivalent, in a + SELECT. + """ + + return exclusions.open() + + @property + def bound_limit_offset(self): + """target database can render LIMIT and/or OFFSET using a bound + parameter + """ return exclusions.open() @@ -152,17 +165,16 @@ def fetch_rows_post_commit(self): return exclusions.open() - @property def empty_inserts(self): """target platform supports INSERT with no values, i.e. INSERT DEFAULT VALUES or equivalent.""" return exclusions.only_if( - lambda config: config.db.dialect.supports_empty_insert or \ - config.db.dialect.supports_default_values, - "empty inserts not supported" - ) + lambda config: config.db.dialect.supports_empty_insert or + config.db.dialect.supports_default_values, + "empty inserts not supported" + ) @property def insert_from_select(self): @@ -175,9 +187,9 @@ def returning(self): """target platform supports RETURNING.""" return exclusions.only_if( - lambda config: config.db.dialect.implicit_returning, - "'returning' not supported by database" - ) + lambda config: config.db.dialect.implicit_returning, + "%(database)s %(does_support)s 'returning'" + ) @property def duplicate_names_in_cursor_description(self): @@ -192,9 +204,9 @@ def denormalized_names(self): UPPERCASE as case insensitive names.""" return exclusions.skip_if( - lambda config: not config.db.dialect.requires_name_normalize, - "Backend does not require denormalized names." - ) + lambda config: not config.db.dialect.requires_name_normalize, + "Backend does not require denormalized names." + ) @property def multivalues_inserts(self): @@ -202,10 +214,9 @@ def multivalues_inserts(self): INSERT statement.""" return exclusions.skip_if( - lambda config: not config.db.dialect.supports_multivalues_insert, - "Backend does not support multirow inserts." - ) - + lambda config: not config.db.dialect.supports_multivalues_insert, + "Backend does not support multirow inserts." 
+ ) @property def implements_get_lastrowid(self): @@ -253,8 +264,8 @@ def sequences(self): """Target database must support SEQUENCEs.""" return exclusions.only_if([ - lambda config: config.db.dialect.supports_sequences - ], "no sequence support") + lambda config: config.db.dialect.supports_sequences + ], "no sequence support") @property def sequences_optional(self): @@ -262,13 +273,9 @@ def sequences_optional(self): as a means of generating new PK values.""" return exclusions.only_if([ - lambda config: config.db.dialect.supports_sequences and \ - config.db.dialect.sequences_optional - ], "no sequence support, or sequences not optional") - - - - + lambda config: config.db.dialect.supports_sequences and + config.db.dialect.sequences_optional + ], "no sequence support, or sequences not optional") @property def reflects_pk_names(self): @@ -306,6 +313,25 @@ def primary_key_constraint_reflection(self): def foreign_key_constraint_reflection(self): return exclusions.open() + @property + def temp_table_reflection(self): + return exclusions.open() + + @property + def temp_table_names(self): + """target dialect supports listing of temporary table names""" + return exclusions.closed() + + @property + def temporary_tables(self): + """target database supports temporary tables""" + return exclusions.open() + + @property + def temporary_views(self): + """target database supports temporary views""" + return exclusions.closed() + @property def index_reflection(self): return exclusions.open() @@ -315,6 +341,14 @@ def unique_constraint_reflection(self): """target dialect supports reflection of unique constraints""" return exclusions.open() + @property + def duplicate_key_raises_integrity_error(self): + """target dialect raises IntegrityError when reporting an INSERT + with a primary key violation. (hint: it should) + + """ + return exclusions.open() + @property def unbounded_varchar(self): """Target database must support VARCHAR with no length""" @@ -332,7 +366,9 @@ def unicode_data(self): @property def unicode_ddl(self): - """Target driver must support some degree of non-ascii symbol names.""" + """Target driver must support some degree of non-ascii symbol + names. + """ return exclusions.closed() @property @@ -524,7 +560,6 @@ def two_phase_transactions(self): return exclusions.closed() - @property def update_from(self): """Target must support UPDATE..FROM syntax""" @@ -580,7 +615,17 @@ def order_by_label_with_expression(self): @property def unicode_connections(self): - """Target driver must support non-ASCII characters being passed at all.""" + """Target driver must support non-ASCII characters being passed at + all. + """ + return exclusions.open() + + @property + def graceful_disconnects(self): + """Target driver must raise a DBAPI-level exception, such as + InterfaceError, when the underlying connection has been closed + and the execute() method is called. + """ return exclusions.open() @property @@ -593,11 +638,44 @@ def ad_hoc_engines(self): """Test environment must allow ad-hoc engine/connection creation. DBs that scale poorly for many connections, even when closed, i.e. - Oracle, may use the "--low-connections" option which flags this requirement - as not present. + Oracle, may use the "--low-connections" option which flags this + requirement as not present. 
""" - return exclusions.skip_if(lambda config: config.options.low_connections) + return exclusions.skip_if( + lambda config: config.options.low_connections) + + @property + def timing_intensive(self): + return exclusions.requires_tag("timing_intensive") + + @property + def memory_intensive(self): + return exclusions.requires_tag("memory_intensive") + + @property + def threading_with_mock(self): + """Mark tests that use threading and mock at the same time - stability + issues have been observed with coverage + python 3.3 + + """ + return exclusions.skip_if( + lambda config: util.py3k and config.options.has_coverage, + "Stability issues with coverage + py3k" + ) + + @property + def no_coverage(self): + """Test should be skipped if coverage is enabled. + + This is to block tests that exercise libraries that seem to be + sensitive to coverage, such as Postgresql notice logging. + + """ + return exclusions.skip_if( + lambda config: config.options.has_coverage, + "Issues observed when coverage is enabled" + ) def _has_mysql_on_windows(self, config): return False @@ -612,8 +690,8 @@ def sqlite(self): @property def cextensions(self): return exclusions.skip_if( - lambda: not self._has_cextensions(), "C extensions not installed" - ) + lambda: not self._has_cextensions(), "C extensions not installed" + ) def _has_sqlite(self): from sqlalchemy import create_engine diff --git a/lib/sqlalchemy/testing/runner.py b/lib/sqlalchemy/testing/runner.py index 19aba53df1..b58aa019db 100644 --- a/lib/sqlalchemy/testing/runner.py +++ b/lib/sqlalchemy/testing/runner.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # testing/runner.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -29,7 +30,7 @@ """ -from sqlalchemy.testing.plugin.noseplugin import NoseSQLAlchemy +from .plugin.noseplugin import NoseSQLAlchemy import nose @@ -37,6 +38,7 @@ def main(): nose.main(addplugins=[NoseSQLAlchemy()]) + def setup_py_test(): """Runner to use for the 'test_suite' entry of your setup.py. 
diff --git a/lib/sqlalchemy/testing/schema.py b/lib/sqlalchemy/testing/schema.py index 4766af180e..2cd6e4cd2f 100644 --- a/lib/sqlalchemy/testing/schema.py +++ b/lib/sqlalchemy/testing/schema.py @@ -1,5 +1,6 @@ # testing/schema.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -66,12 +67,12 @@ def Column(*args, **kw): test_opts = dict([(k, kw.pop(k)) for k in list(kw) if k.startswith('test_')]) - if config.requirements.foreign_key_ddl.predicate(config): + if not config.requirements.foreign_key_ddl.enabled_for_config(config): args = [arg for arg in args if not isinstance(arg, schema.ForeignKey)] col = schema.Column(*args, **kw) if 'test_needs_autoincrement' in test_opts and \ - kw.get('primary_key', False): + kw.get('primary_key', False): # allow any test suite to pick up on this col.info['test_needs_autoincrement'] = True @@ -82,19 +83,16 @@ def Column(*args, **kw): def add_seq(c, tbl): c._init_items( schema.Sequence(_truncate_name( - config.db.dialect, tbl.name + '_' + c.name + '_seq'), + config.db.dialect, tbl.name + '_' + c.name + '_seq'), optional=True) ) event.listen(col, 'after_parent_attach', add_seq, propagate=True) return col - - - def _truncate_name(dialect, name): if len(name) > dialect.max_identifier_length: return name[0:max(dialect.max_identifier_length - 6, 0)] + \ - "_" + hex(hash(name) % 64)[2:] + "_" + hex(hash(name) % 64)[2:] else: return name diff --git a/lib/sqlalchemy/testing/suite/__init__.py b/lib/sqlalchemy/testing/suite/__init__.py index 780aa40aa7..9eeffd4cb0 100644 --- a/lib/sqlalchemy/testing/suite/__init__.py +++ b/lib/sqlalchemy/testing/suite/__init__.py @@ -1,4 +1,5 @@ +from sqlalchemy.testing.suite.test_dialect import * from sqlalchemy.testing.suite.test_ddl import * from sqlalchemy.testing.suite.test_insert import * from sqlalchemy.testing.suite.test_sequence import * diff --git a/lib/sqlalchemy/testing/suite/test_ddl.py b/lib/sqlalchemy/testing/suite/test_ddl.py index 2dca1443d4..1d8010c8ae 100644 --- a/lib/sqlalchemy/testing/suite/test_ddl.py +++ b/lib/sqlalchemy/testing/suite/test_ddl.py @@ -12,15 +12,17 @@ class TableDDLTest(fixtures.TestBase): def _simple_fixture(self): return Table('test_table', self.metadata, - Column('id', Integer, primary_key=True, autoincrement=False), - Column('data', String(50)) - ) + Column('id', Integer, primary_key=True, + autoincrement=False), + Column('data', String(50)) + ) def _underscore_fixture(self): return Table('_test_table', self.metadata, - Column('id', Integer, primary_key=True, autoincrement=False), - Column('_data', String(50)) - ) + Column('id', Integer, primary_key=True, + autoincrement=False), + Column('_data', String(50)) + ) def _simple_roundtrip(self, table): with config.db.begin() as conn: diff --git a/lib/sqlalchemy/testing/suite/test_dialect.py b/lib/sqlalchemy/testing/suite/test_dialect.py new file mode 100644 index 0000000000..00884a212c --- /dev/null +++ b/lib/sqlalchemy/testing/suite/test_dialect.py @@ -0,0 +1,41 @@ +from .. import fixtures, config +from ..config import requirements +from sqlalchemy import exc +from sqlalchemy import Integer, String +from .. import assert_raises +from ..schema import Table, Column + + +class ExceptionTest(fixtures.TablesTest): + """Test basic exception wrapping. 
+ + DBAPIs vary a lot in exception behavior so to actually anticipate + specific exceptions from real round trips, we need to be conservative. + + """ + run_deletes = 'each' + + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table('manual_pk', metadata, + Column('id', Integer, primary_key=True, autoincrement=False), + Column('data', String(50)) + ) + + @requirements.duplicate_key_raises_integrity_error + def test_integrity_error(self): + + with config.db.begin() as conn: + conn.execute( + self.tables.manual_pk.insert(), + {'id': 1, 'data': 'd1'} + ) + + assert_raises( + exc.IntegrityError, + conn.execute, + self.tables.manual_pk.insert(), + {'id': 1, 'data': 'd1'} + ) diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index 3444e15c8d..70e8a6b17b 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -4,7 +4,7 @@ from ..assertions import eq_ from .. import engines -from sqlalchemy import Integer, String, select, util +from sqlalchemy import Integer, String, select, literal_column, literal from ..schema import Table, Column @@ -21,15 +21,15 @@ class LastrowidTest(fixtures.TablesTest): @classmethod def define_tables(cls, metadata): Table('autoinc_pk', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50)) + ) Table('manual_pk', metadata, - Column('id', Integer, primary_key=True, autoincrement=False), - Column('data', String(50)) - ) + Column('id', Integer, primary_key=True, autoincrement=False), + Column('data', String(50)) + ) def _assert_round_trip(self, table, conn): row = conn.execute(table.select()).first() @@ -59,8 +59,9 @@ def test_last_inserted_id(self): ) # failed on pypy1.9 but seems to be OK on pypy 2.1 - #@exclusions.fails_if(lambda: util.pypy, "lastrowid not maintained after " - # "connection close") + # @exclusions.fails_if(lambda: util.pypy, + # "lastrowid not maintained after " + # "connection close") @requirements.dbapi_lastrowid def test_native_lastrowid_autoinc(self): r = config.db.execute( @@ -81,19 +82,26 @@ class InsertBehaviorTest(fixtures.TablesTest): @classmethod def define_tables(cls, metadata): Table('autoinc_pk', metadata, - Column('id', Integer, primary_key=True, \ - test_needs_autoincrement=True), - Column('data', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50)) + ) Table('manual_pk', metadata, - Column('id', Integer, primary_key=True, autoincrement=False), - Column('data', String(50)) - ) + Column('id', Integer, primary_key=True, autoincrement=False), + Column('data', String(50)) + ) + Table('includes_defaults', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50)), + Column('x', Integer, default=5), + Column('y', Integer, + default=literal_column("2", type_=Integer) + literal(2))) def test_autoclose_on_insert(self): if requirements.returning.enabled: engine = engines.testing_engine( - options={'implicit_returning': False}) + options={'implicit_returning': False}) else: engine = config.db @@ -101,7 +109,8 @@ def test_autoclose_on_insert(self): self.tables.autoinc_pk.insert(), data="some data" ) - assert r.closed + assert r._soft_closed + assert not r.closed assert r.is_insert assert not r.returns_rows @@ -111,7 +120,8 @@ def 
test_autoclose_on_insert_implicit_returning(self): self.tables.autoinc_pk.insert(), data="some data" ) - assert r.closed + assert r._soft_closed + assert not r.closed assert r.is_insert assert not r.returns_rows @@ -119,12 +129,13 @@ def test_autoclose_on_insert_implicit_returning(self): def test_empty_insert(self): r = config.db.execute( self.tables.autoinc_pk.insert(), - ) - assert r.closed + ) + assert r._soft_closed + assert not r.closed r = config.db.execute( - self.tables.autoinc_pk.select().\ - where(self.tables.autoinc_pk.c.id != None) + self.tables.autoinc_pk.select(). + where(self.tables.autoinc_pk.c.id != None) ) assert len(r.fetchall()) @@ -133,21 +144,20 @@ def test_empty_insert(self): def test_insert_from_select(self): table = self.tables.manual_pk config.db.execute( - table.insert(), - [ - dict(id=1, data="data1"), - dict(id=2, data="data2"), - dict(id=3, data="data3"), - ] + table.insert(), + [ + dict(id=1, data="data1"), + dict(id=2, data="data2"), + dict(id=3, data="data3"), + ] ) - config.db.execute( - table.insert(inline=True). - from_select( - ("id", "data",), select([table.c.id + 5, table.c.data]).where( - table.c.data.in_(["data2", "data3"])) - ), + table.insert(inline=True). + from_select(("id", "data",), + select([table.c.id + 5, table.c.data]). + where(table.c.data.in_(["data2", "data3"])) + ), ) eq_( @@ -158,6 +168,35 @@ def test_insert_from_select(self): ("data3", ), ("data3", )] ) + @requirements.insert_from_select + def test_insert_from_select_with_defaults(self): + table = self.tables.includes_defaults + config.db.execute( + table.insert(), + [ + dict(id=1, data="data1"), + dict(id=2, data="data2"), + dict(id=3, data="data3"), + ] + ) + + config.db.execute( + table.insert(inline=True). + from_select(("id", "data",), + select([table.c.id + 5, table.c.data]). 
+ where(table.c.data.in_(["data2", "data3"])) + ), + ) + + eq_( + config.db.execute( + select([table]).order_by(table.c.data, table.c.id) + ).fetchall(), + [(1, 'data1', 5, 4), (2, 'data2', 5, 4), + (7, 'data2', 5, 4), (3, 'data3', 5, 4), (8, 'data3', 5, 4)] + ) + + class ReturningTest(fixtures.TablesTest): run_create_tables = 'each' __requires__ = 'returning', 'autoincrement_insert' @@ -175,10 +214,10 @@ def _assert_round_trip(self, table, conn): @classmethod def define_tables(cls, metadata): Table('autoinc_pk', metadata, - Column('id', Integer, primary_key=True, \ - test_needs_autoincrement=True), - Column('data', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50)) + ) @requirements.fetch_rows_post_commit def test_explicit_returning_pk_autocommit(self): @@ -186,7 +225,7 @@ def test_explicit_returning_pk_autocommit(self): table = self.tables.autoinc_pk r = engine.execute( table.insert().returning( - table.c.id), + table.c.id), data="some data" ) pk = r.first()[0] @@ -199,7 +238,7 @@ def test_explicit_returning_pk_no_autocommit(self): with engine.begin() as conn: r = conn.execute( table.insert().returning( - table.c.id), + table.c.id), data="some data" ) pk = r.first()[0] diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index 762c9955c9..1874f6210e 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -24,9 +24,9 @@ class HasTableTest(fixtures.TablesTest): @classmethod def define_tables(cls, metadata): Table('test_table', metadata, - Column('id', Integer, primary_key=True), - Column('data', String(50)) - ) + Column('id', Integer, primary_key=True), + Column('data', String(50)) + ) def test_has_table(self): with config.db.begin() as conn: @@ -34,18 +34,25 @@ def test_has_table(self): assert not config.db.dialect.has_table(conn, "nonexistent_table") - - class ComponentReflectionTest(fixtures.TablesTest): run_inserts = run_deletes = None __backend__ = True + @classmethod + def setup_bind(cls): + if config.requirements.independent_connections.enabled: + from sqlalchemy import pool + return engines.testing_engine( + options=dict(poolclass=pool.StaticPool)) + else: + return config.db + @classmethod def define_tables(cls, metadata): cls.define_reflected_tables(metadata, None) if testing.requires.schemas.enabled: - cls.define_reflected_tables(metadata, "test_schema") + cls.define_reflected_tables(metadata, testing.config.test_schema) @classmethod def define_reflected_tables(cls, metadata, schema): @@ -56,46 +63,84 @@ def define_reflected_tables(cls, metadata, schema): if testing.requires.self_referential_foreign_keys.enabled: users = Table('users', metadata, - Column('user_id', sa.INT, primary_key=True), - Column('test1', sa.CHAR(5), nullable=False), - Column('test2', sa.Float(5), nullable=False), - Column('parent_user_id', sa.Integer, - sa.ForeignKey('%susers.user_id' % schema_prefix)), - schema=schema, - test_needs_fk=True, - ) + Column('user_id', sa.INT, primary_key=True), + Column('test1', sa.CHAR(5), nullable=False), + Column('test2', sa.Float(5), nullable=False), + Column('parent_user_id', sa.Integer, + sa.ForeignKey('%susers.user_id' % + schema_prefix)), + schema=schema, + test_needs_fk=True, + ) else: users = Table('users', metadata, - Column('user_id', sa.INT, primary_key=True), - Column('test1', sa.CHAR(5), nullable=False), - Column('test2', sa.Float(5), nullable=False), - schema=schema, - 
test_needs_fk=True, - ) + Column('user_id', sa.INT, primary_key=True), + Column('test1', sa.CHAR(5), nullable=False), + Column('test2', sa.Float(5), nullable=False), + schema=schema, + test_needs_fk=True, + ) Table("dingalings", metadata, - Column('dingaling_id', sa.Integer, primary_key=True), - Column('address_id', sa.Integer, - sa.ForeignKey('%semail_addresses.address_id' % - schema_prefix)), - Column('data', sa.String(30)), - schema=schema, - test_needs_fk=True, - ) + Column('dingaling_id', sa.Integer, primary_key=True), + Column('address_id', sa.Integer, + sa.ForeignKey('%semail_addresses.address_id' % + schema_prefix)), + Column('data', sa.String(30)), + schema=schema, + test_needs_fk=True, + ) Table('email_addresses', metadata, - Column('address_id', sa.Integer), - Column('remote_user_id', sa.Integer, - sa.ForeignKey(users.c.user_id)), - Column('email_address', sa.String(20)), - sa.PrimaryKeyConstraint('address_id', name='email_ad_pk'), - schema=schema, - test_needs_fk=True, - ) + Column('address_id', sa.Integer), + Column('remote_user_id', sa.Integer, + sa.ForeignKey(users.c.user_id)), + Column('email_address', sa.String(20)), + sa.PrimaryKeyConstraint('address_id', name='email_ad_pk'), + schema=schema, + test_needs_fk=True, + ) if testing.requires.index_reflection.enabled: cls.define_index(metadata, users) if testing.requires.view_column_reflection.enabled: cls.define_views(metadata, schema) + if not schema and testing.requires.temp_table_reflection.enabled: + cls.define_temp_tables(metadata) + + @classmethod + def define_temp_tables(cls, metadata): + # cheat a bit, we should fix this with some dialect-level + # temp table fixture + if testing.against("oracle"): + kw = { + 'prefixes': ["GLOBAL TEMPORARY"], + 'oracle_on_commit': 'PRESERVE ROWS' + } + else: + kw = { + 'prefixes': ["TEMPORARY"], + } + + user_tmp = Table( + "user_tmp", metadata, + Column("id", sa.INT, primary_key=True), + Column('name', sa.VARCHAR(50)), + Column('foo', sa.INT), + sa.UniqueConstraint('name', name='user_tmp_uq'), + sa.Index("user_tmp_ix", "foo"), + **kw + ) + if testing.requires.view_reflection.enabled and \ + testing.requires.temporary_views.enabled: + event.listen( + user_tmp, "after_create", + DDL("create temporary view user_tmp_v as " + "select * from user_tmp") + ) + event.listen( + user_tmp, "before_drop", + DDL("drop view user_tmp_v") + ) @classmethod def define_index(cls, metadata, users): @@ -110,7 +155,7 @@ def define_views(cls, metadata, schema): fullname = "%s.%s" % (schema, table_name) view_name = fullname + '_v' query = "CREATE VIEW %s AS SELECT * FROM %s" % ( - view_name, fullname) + view_name, fullname) event.listen( metadata, @@ -127,7 +172,7 @@ def define_views(cls, metadata, schema): def test_get_schema_names(self): insp = inspect(testing.db) - self.assert_('test_schema' in insp.get_schema_names()) + self.assert_(testing.config.test_schema in insp.get_schema_names()) @testing.requires.schema_reflection def test_dialect_initialize(self): @@ -146,8 +191,9 @@ def _test_get_table_names(self, schema=None, table_type='table', order_by=None): meta = self.metadata users, addresses, dingalings = self.tables.users, \ - self.tables.email_addresses, self.tables.dingalings + self.tables.email_addresses, self.tables.dingalings insp = inspect(meta.bind) + if table_type == 'view': table_names = insp.get_view_names(schema) table_names.sort() @@ -163,6 +209,20 @@ def _test_get_table_names(self, schema=None, table_type='table', answer = ['dingalings', 'email_addresses', 'users'] eq_(sorted(table_names), 
answer) + @testing.requires.temp_table_names + def test_get_temp_table_names(self): + insp = inspect(self.bind) + temp_table_names = insp.get_temp_table_names() + eq_(sorted(temp_table_names), ['user_tmp']) + + @testing.requires.view_reflection + @testing.requires.temp_table_names + @testing.requires.temporary_views + def test_get_temp_view_names(self): + insp = inspect(self.bind) + temp_table_names = insp.get_temp_view_names() + eq_(sorted(temp_table_names), ['user_tmp_v']) + @testing.requires.table_reflection def test_get_table_names(self): self._test_get_table_names() @@ -175,7 +235,7 @@ def test_get_table_names_fks(self): @testing.requires.table_reflection @testing.requires.schemas def test_get_table_names_with_schema(self): - self._test_get_table_names('test_schema') + self._test_get_table_names(testing.config.test_schema) @testing.requires.view_column_reflection def test_get_view_names(self): @@ -184,7 +244,8 @@ def test_get_view_names(self): @testing.requires.view_column_reflection @testing.requires.schemas def test_get_view_names_with_schema(self): - self._test_get_table_names('test_schema', table_type='view') + self._test_get_table_names( + testing.config.test_schema, table_type='view') @testing.requires.table_reflection @testing.requires.view_column_reflection @@ -195,13 +256,13 @@ def test_get_tables_and_views(self): def _test_get_columns(self, schema=None, table_type='table'): meta = MetaData(testing.db) users, addresses, dingalings = self.tables.users, \ - self.tables.email_addresses, self.tables.dingalings + self.tables.email_addresses, self.tables.dingalings table_names = ['users', 'email_addresses'] if table_type == 'view': table_names = ['users_v', 'email_addresses_v'] insp = inspect(meta.bind) for table_name, table in zip(table_names, (users, - addresses)): + addresses)): schema_name = schema cols = insp.get_columns(table_name, schema=schema_name) self.assert_(len(cols) > 0, len(cols)) @@ -218,23 +279,24 @@ def _test_get_columns(self, schema=None, table_type='table'): # Oracle returns Date for DateTime. if testing.against('oracle') and ctype_def \ - in (sql_types.Date, sql_types.DateTime): + in (sql_types.Date, sql_types.DateTime): ctype_def = sql_types.Date # assert that the desired type and return type share # a base within one of the generic types. self.assert_(len(set(ctype.__mro__). - intersection(ctype_def.__mro__).intersection([ - sql_types.Integer, - sql_types.Numeric, - sql_types.DateTime, - sql_types.Date, - sql_types.Time, - sql_types.String, - sql_types._Binary, - ])) > 0, '%s(%s), %s(%s)' % (col.name, - col.type, cols[i]['name'], ctype)) + intersection(ctype_def.__mro__). 
+ intersection([ + sql_types.Integer, + sql_types.Numeric, + sql_types.DateTime, + sql_types.Date, + sql_types.Time, + sql_types.String, + sql_types._Binary, + ])) > 0, '%s(%s), %s(%s)' % + (col.name, col.type, cols[i]['name'], ctype)) if not col.primary_key: assert cols[i]['default'] is None @@ -246,11 +308,11 @@ def test_get_columns(self): @testing.provide_metadata def _type_round_trip(self, *types): t = Table('t', self.metadata, - *[ - Column('t%d' % i, type_) - for i, type_ in enumerate(types) - ] - ) + *[ + Column('t%d' % i, type_) + for i, type_ in enumerate(types) + ] + ) t.create() return [ @@ -261,8 +323,8 @@ def _type_round_trip(self, *types): @testing.requires.table_reflection def test_numeric_reflection(self): for typ in self._type_round_trip( - sql_types.Numeric(18, 5), - ): + sql_types.Numeric(18, 5), + ): assert isinstance(typ, sql_types.Numeric) eq_(typ.precision, 18) eq_(typ.scale, 5) @@ -277,8 +339,8 @@ def test_varchar_reflection(self): @testing.provide_metadata def test_nullable_reflection(self): t = Table('t', self.metadata, - Column('a', Integer, nullable=True), - Column('b', Integer, nullable=False)) + Column('a', Integer, nullable=True), + Column('b', Integer, nullable=False)) t.create() eq_( dict( @@ -288,11 +350,32 @@ def test_nullable_reflection(self): {"a": True, "b": False} ) - @testing.requires.table_reflection @testing.requires.schemas def test_get_columns_with_schema(self): - self._test_get_columns(schema='test_schema') + self._test_get_columns(schema=testing.config.test_schema) + + @testing.requires.temp_table_reflection + def test_get_temp_table_columns(self): + meta = MetaData(self.bind) + user_tmp = self.tables.user_tmp + insp = inspect(meta.bind) + cols = insp.get_columns('user_tmp') + self.assert_(len(cols) > 0, len(cols)) + + for i, col in enumerate(user_tmp.columns): + eq_(col.name, cols[i]['name']) + + @testing.requires.temp_table_reflection + @testing.requires.view_column_reflection + @testing.requires.temporary_views + def test_get_temp_view_columns(self): + insp = inspect(self.bind) + cols = insp.get_columns('user_tmp_v') + eq_( + [col['name'] for col in cols], + ['id', 'name', 'foo'] + ) @testing.requires.view_column_reflection def test_get_view_columns(self): @@ -301,7 +384,8 @@ def test_get_view_columns(self): @testing.requires.view_column_reflection @testing.requires.schemas def test_get_view_columns_with_schema(self): - self._test_get_columns(schema='test_schema', table_type='view') + self._test_get_columns( + schema=testing.config.test_schema, table_type='view') @testing.provide_metadata def _test_get_pk_constraint(self, schema=None): @@ -311,11 +395,11 @@ def _test_get_pk_constraint(self, schema=None): users_cons = insp.get_pk_constraint(users.name, schema=schema) users_pkeys = users_cons['constrained_columns'] - eq_(users_pkeys, ['user_id']) + eq_(users_pkeys, ['user_id']) addr_cons = insp.get_pk_constraint(addresses.name, schema=schema) addr_pkeys = addr_cons['constrained_columns'] - eq_(addr_pkeys, ['address_id']) + eq_(addr_pkeys, ['address_id']) with testing.requires.reflects_pk_names.fail_if(): eq_(addr_cons['name'], 'email_ad_pk') @@ -328,7 +412,7 @@ def test_get_pk_constraint(self): @testing.requires.primary_key_constraint_reflection @testing.requires.schemas def test_get_pk_constraint_with_schema(self): - self._test_get_pk_constraint(schema='test_schema') + self._test_get_pk_constraint(schema=testing.config.test_schema) @testing.requires.table_reflection @testing.provide_metadata @@ -347,7 +431,7 @@ def 
test_deprecated_get_primary_keys(self): def _test_get_foreign_keys(self, schema=None): meta = self.metadata users, addresses, dingalings = self.tables.users, \ - self.tables.email_addresses, self.tables.dingalings + self.tables.email_addresses, self.tables.dingalings insp = inspect(meta.bind) expected_schema = schema # users @@ -366,7 +450,7 @@ def _test_get_foreign_keys(self, schema=None): if testing.requires.self_referential_foreign_keys.enabled: eq_(fkey1['constrained_columns'], ['parent_user_id']) - #addresses + # addresses addr_fkeys = insp.get_foreign_keys(addresses.name, schema=schema) fkey1 = addr_fkeys[0] @@ -386,13 +470,13 @@ def test_get_foreign_keys(self): @testing.requires.foreign_key_constraint_reflection @testing.requires.schemas def test_get_foreign_keys_with_schema(self): - self._test_get_foreign_keys(schema='test_schema') + self._test_get_foreign_keys(schema=testing.config.test_schema) @testing.provide_metadata def _test_get_indexes(self, schema=None): meta = self.metadata users, addresses, dingalings = self.tables.users, \ - self.tables.email_addresses, self.tables.dingalings + self.tables.email_addresses, self.tables.dingalings # The database may decide to create indexes for foreign keys, etc. # so there may be more indexes than expected. insp = inspect(meta.bind) @@ -419,26 +503,57 @@ def test_get_indexes(self): @testing.requires.index_reflection @testing.requires.schemas def test_get_indexes_with_schema(self): - self._test_get_indexes(schema='test_schema') - + self._test_get_indexes(schema=testing.config.test_schema) @testing.requires.unique_constraint_reflection def test_get_unique_constraints(self): self._test_get_unique_constraints() + @testing.requires.temp_table_reflection + @testing.requires.unique_constraint_reflection + def test_get_temp_table_unique_constraints(self): + insp = inspect(self.bind) + reflected = insp.get_unique_constraints('user_tmp') + for refl in reflected: + # Different dialects handle duplicate index and constraints + # differently, so ignore this flag + refl.pop('duplicates_index', None) + eq_(reflected, [{'column_names': ['name'], 'name': 'user_tmp_uq'}]) + + @testing.requires.temp_table_reflection + def test_get_temp_table_indexes(self): + insp = inspect(self.bind) + indexes = insp.get_indexes('user_tmp') + for ind in indexes: + ind.pop('dialect_options', None) + eq_( + # TODO: we need to add better filtering for indexes/uq constraints + # that are doubled up + [idx for idx in indexes if idx['name'] == 'user_tmp_ix'], + [{'unique': False, 'column_names': ['foo'], 'name': 'user_tmp_ix'}] + ) + @testing.requires.unique_constraint_reflection @testing.requires.schemas def test_get_unique_constraints_with_schema(self): - self._test_get_unique_constraints(schema='test_schema') + self._test_get_unique_constraints(schema=testing.config.test_schema) @testing.provide_metadata def _test_get_unique_constraints(self, schema=None): + # SQLite dialect needs to parse the names of the constraints + # separately from what it gets from PRAGMA index_list(), and + # then matches them up. so same set of column_names in two + # constraints will confuse it. Perhaps we should no longer + # bother with index_list() here since we have the whole + # CREATE TABLE? 
uniques = sorted( [ {'name': 'unique_a', 'column_names': ['a']}, {'name': 'unique_a_b_c', 'column_names': ['a', 'b', 'c']}, {'name': 'unique_c_a_b', 'column_names': ['c', 'a', 'b']}, {'name': 'unique_asc_key', 'column_names': ['asc', 'key']}, + {'name': 'i.have.dots', 'column_names': ['b']}, + {'name': 'i have spaces', 'column_names': ['c']}, ], key=operator.itemgetter('name') ) @@ -466,14 +581,16 @@ def _test_get_unique_constraints(self, schema=None): ) for orig, refl in zip(uniques, reflected): + # Different dialects handle duplicate index and constraints + # differently, so ignore this flag + refl.pop('duplicates_index', None) eq_(orig, refl) - @testing.provide_metadata def _test_get_view_definition(self, schema=None): meta = self.metadata users, addresses, dingalings = self.tables.users, \ - self.tables.email_addresses, self.tables.dingalings + self.tables.email_addresses, self.tables.dingalings view_name1 = 'users_v' view_name2 = 'email_addresses_v' insp = inspect(meta.bind) @@ -489,14 +606,14 @@ def test_get_view_definition(self): @testing.requires.view_reflection @testing.requires.schemas def test_get_view_definition_with_schema(self): - self._test_get_view_definition(schema='test_schema') + self._test_get_view_definition(schema=testing.config.test_schema) @testing.only_on("postgresql", "PG specific feature") @testing.provide_metadata def _test_get_table_oid(self, table_name, schema=None): meta = self.metadata users, addresses, dingalings = self.tables.users, \ - self.tables.email_addresses, self.tables.dingalings + self.tables.email_addresses, self.tables.dingalings insp = inspect(meta.bind) oid = insp.get_table_oid(table_name, schema) self.assert_(isinstance(oid, int)) @@ -506,7 +623,7 @@ def test_get_table_oid(self): @testing.requires.schemas def test_get_table_oid_with_schema(self): - self._test_get_table_oid('users', schema='test_schema') + self._test_get_table_oid('users', schema=testing.config.test_schema) @testing.requires.table_reflection @testing.provide_metadata @@ -527,14 +644,13 @@ def test_autoincrement_col(self): insp = inspect(meta.bind) for tname, cname in [ - ('users', 'user_id'), - ('email_addresses', 'address_id'), - ('dingalings', 'dingaling_id'), - ]: + ('users', 'user_id'), + ('email_addresses', 'address_id'), + ('dingalings', 'dingaling_id'), + ]: cols = insp.get_columns(tname) id_ = dict((c['name'], c) for c in cols)[cname] assert id_.get('autoincrement', True) - __all__ = ('ComponentReflectionTest', 'HasTableTest') diff --git a/lib/sqlalchemy/testing/suite/test_results.py b/lib/sqlalchemy/testing/suite/test_results.py index 2fdab4d17a..9ffaa6e04d 100644 --- a/lib/sqlalchemy/testing/suite/test_results.py +++ b/lib/sqlalchemy/testing/suite/test_results.py @@ -15,13 +15,13 @@ class RowFetchTest(fixtures.TablesTest): @classmethod def define_tables(cls, metadata): Table('plain_pk', metadata, - Column('id', Integer, primary_key=True), - Column('data', String(50)) - ) + Column('id', Integer, primary_key=True), + Column('data', String(50)) + ) Table('has_dates', metadata, - Column('id', Integer, primary_key=True), - Column('today', DateTime) - ) + Column('id', Integer, primary_key=True), + Column('today', DateTime) + ) @classmethod def insert_data(cls): @@ -43,9 +43,9 @@ def insert_data(cls): def test_via_string(self): row = config.db.execute( - self.tables.plain_pk.select().\ - order_by(self.tables.plain_pk.c.id) - ).first() + self.tables.plain_pk.select(). 
+ order_by(self.tables.plain_pk.c.id) + ).first() eq_( row['id'], 1 @@ -56,9 +56,9 @@ def test_via_string(self): def test_via_int(self): row = config.db.execute( - self.tables.plain_pk.select().\ - order_by(self.tables.plain_pk.c.id) - ).first() + self.tables.plain_pk.select(). + order_by(self.tables.plain_pk.c.id) + ).first() eq_( row[0], 1 @@ -69,9 +69,9 @@ def test_via_int(self): def test_via_col_object(self): row = config.db.execute( - self.tables.plain_pk.select().\ - order_by(self.tables.plain_pk.c.id) - ).first() + self.tables.plain_pk.select(). + order_by(self.tables.plain_pk.c.id) + ).first() eq_( row[self.tables.plain_pk.c.id], 1 @@ -83,15 +83,14 @@ def test_via_col_object(self): @requirements.duplicate_names_in_cursor_description def test_row_with_dupe_names(self): result = config.db.execute( - select([self.tables.plain_pk.c.data, - self.tables.plain_pk.c.data.label('data')]).\ - order_by(self.tables.plain_pk.c.id) - ) + select([self.tables.plain_pk.c.data, + self.tables.plain_pk.c.data.label('data')]). + order_by(self.tables.plain_pk.c.id) + ) row = result.first() eq_(result.keys(), ['data', 'data']) eq_(row, ('d1', 'd1')) - def test_row_w_scalar_select(self): """test that a scalar select as a column is returned as such and that type conversion works OK. @@ -124,12 +123,13 @@ class PercentSchemaNamesTest(fixtures.TablesTest): @classmethod def define_tables(cls, metadata): cls.tables.percent_table = Table('percent%table', metadata, - Column("percent%", Integer), - Column("spaces % more spaces", Integer), - ) - cls.tables.lightweight_percent_table = sql.table('percent%table', - sql.column("percent%"), - sql.column("spaces % more spaces"), + Column("percent%", Integer), + Column( + "spaces % more spaces", Integer), + ) + cls.tables.lightweight_percent_table = sql.table( + 'percent%table', sql.column("percent%"), + sql.column("spaces % more spaces") ) def test_single_roundtrip(self): @@ -152,8 +152,8 @@ def test_executemany_roundtrip(self): config.db.execute( percent_table.insert(), [{'percent%': 7, 'spaces % more spaces': 11}, - {'percent%': 9, 'spaces % more spaces': 10}, - {'percent%': 11, 'spaces % more spaces': 9}] + {'percent%': 9, 'spaces % more spaces': 10}, + {'percent%': 11, 'spaces % more spaces': 9}] ) self._assert_table() @@ -162,10 +162,10 @@ def _assert_table(self): lightweight_percent_table = self.tables.lightweight_percent_table for table in ( - percent_table, - percent_table.alias(), - lightweight_percent_table, - lightweight_percent_table.alias()): + percent_table, + percent_table.alias(), + lightweight_percent_table, + lightweight_percent_table.alias()): eq_( list( config.db.execute( @@ -184,18 +184,18 @@ def _assert_table(self): list( config.db.execute( table.select(). - where(table.c['spaces % more spaces'].in_([9, 10])). - order_by(table.c['percent%']), + where(table.c['spaces % more spaces'].in_([9, 10])). + order_by(table.c['percent%']), ) ), - [ - (9, 10), - (11, 9) - ] + [ + (9, 10), + (11, 9) + ] ) - row = config.db.execute(table.select().\ - order_by(table.c['percent%'])).first() + row = config.db.execute(table.select(). + order_by(table.c['percent%'])).first() eq_(row['percent%'], 5) eq_(row['spaces % more spaces'], 12) @@ -211,9 +211,9 @@ def _assert_table(self): eq_( list( config.db.execute( - percent_table.\ - select().\ - order_by(percent_table.c['percent%']) + percent_table. + select(). 
+ order_by(percent_table.c['percent%']) ) ), [(5, 15), (7, 15), (9, 15), (11, 15)] diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py index 2ccff61ea7..d4bf63b55b 100644 --- a/lib/sqlalchemy/testing/suite/test_select.py +++ b/lib/sqlalchemy/testing/suite/test_select.py @@ -2,7 +2,8 @@ from ..assertions import eq_ from sqlalchemy import util -from sqlalchemy import Integer, String, select, func +from sqlalchemy import Integer, String, select, func, bindparam +from sqlalchemy import testing from ..schema import Table, Column @@ -20,12 +21,12 @@ class OrderByLabelTest(fixtures.TablesTest): @classmethod def define_tables(cls, metadata): Table("some_table", metadata, - Column('id', Integer, primary_key=True), - Column('x', Integer), - Column('y', Integer), - Column('q', String(50)), - Column('p', String(50)) - ) + Column('id', Integer, primary_key=True), + Column('x', Integer), + Column('y', Integer), + Column('q', String(50)), + Column('p', String(50)) + ) @classmethod def insert_data(cls): @@ -84,3 +85,108 @@ def test_composed_int_desc(self): select([lx]).order_by(lx.desc()), [(7, ), (5, ), (3, )] ) + + def test_group_by_composed(self): + table = self.tables.some_table + expr = (table.c.x + table.c.y).label('lx') + stmt = select([func.count(table.c.id), expr]).group_by(expr).order_by(expr) + self._assert_result( + stmt, + [(1, 3), (1, 5), (1, 7)] + ) + + +class LimitOffsetTest(fixtures.TablesTest): + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table("some_table", metadata, + Column('id', Integer, primary_key=True), + Column('x', Integer), + Column('y', Integer)) + + @classmethod + def insert_data(cls): + config.db.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "x": 1, "y": 2}, + {"id": 2, "x": 2, "y": 3}, + {"id": 3, "x": 3, "y": 4}, + {"id": 4, "x": 4, "y": 5}, + ] + ) + + def _assert_result(self, select, result, params=()): + eq_( + config.db.execute(select, params).fetchall(), + result + ) + + def test_simple_limit(self): + table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id).limit(2), + [(1, 1, 2), (2, 2, 3)] + ) + + @testing.requires.offset + def test_simple_offset(self): + table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id).offset(2), + [(3, 3, 4), (4, 4, 5)] + ) + + @testing.requires.offset + def test_simple_limit_offset(self): + table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id).limit(2).offset(1), + [(2, 2, 3), (3, 3, 4)] + ) + + @testing.requires.offset + def test_limit_offset_nobinds(self): + """test that 'literal binds' mode works - no bound params.""" + + table = self.tables.some_table + stmt = select([table]).order_by(table.c.id).limit(2).offset(1) + sql = stmt.compile( + dialect=config.db.dialect, + compile_kwargs={"literal_binds": True}) + sql = str(sql) + + self._assert_result( + sql, + [(2, 2, 3), (3, 3, 4)] + ) + + @testing.requires.bound_limit_offset + def test_bound_limit(self): + table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id).limit(bindparam('l')), + [(1, 1, 2), (2, 2, 3)], + params={"l": 2} + ) + + @testing.requires.bound_limit_offset + def test_bound_offset(self): + table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id).offset(bindparam('o')), + [(3, 3, 4), (4, 4, 5)], + params={"o": 2} + ) + + @testing.requires.bound_limit_offset + def test_bound_limit_offset(self): + 
table = self.tables.some_table + self._assert_result( + select([table]).order_by(table.c.id). + limit(bindparam("l")).offset(bindparam("o")), + [(2, 2, 3), (3, 3, 4)], + params={"l": 2, "o": 1} + ) diff --git a/lib/sqlalchemy/testing/suite/test_sequence.py b/lib/sqlalchemy/testing/suite/test_sequence.py index 6bc2822fcf..b2d52f27cc 100644 --- a/lib/sqlalchemy/testing/suite/test_sequence.py +++ b/lib/sqlalchemy/testing/suite/test_sequence.py @@ -7,6 +7,7 @@ from ..schema import Table, Column + class SequenceTest(fixtures.TablesTest): __requires__ = ('sequences',) __backend__ = True @@ -16,15 +17,15 @@ class SequenceTest(fixtures.TablesTest): @classmethod def define_tables(cls, metadata): Table('seq_pk', metadata, - Column('id', Integer, Sequence('tab_id_seq'), primary_key=True), - Column('data', String(50)) - ) + Column('id', Integer, Sequence('tab_id_seq'), primary_key=True), + Column('data', String(50)) + ) Table('seq_opt_pk', metadata, - Column('id', Integer, Sequence('tab_id_seq', optional=True), - primary_key=True), - Column('data', String(50)) - ) + Column('id', Integer, Sequence('tab_id_seq', optional=True), + primary_key=True), + Column('data', String(50)) + ) def test_insert_roundtrip(self): config.db.execute( @@ -62,7 +63,6 @@ def test_optional_seq(self): [1] ) - def _assert_round_trip(self, table, conn): row = conn.execute(table.select()).first() eq_( @@ -80,17 +80,17 @@ def test_has_sequence(self): testing.db.execute(schema.CreateSequence(s1)) try: eq_(testing.db.dialect.has_sequence(testing.db, - 'user_id_seq'), True) + 'user_id_seq'), True) finally: testing.db.execute(schema.DropSequence(s1)) @testing.requires.schemas def test_has_sequence_schema(self): - s1 = Sequence('user_id_seq', schema="test_schema") + s1 = Sequence('user_id_seq', schema=config.test_schema) testing.db.execute(schema.CreateSequence(s1)) try: - eq_(testing.db.dialect.has_sequence(testing.db, - 'user_id_seq', schema="test_schema"), True) + eq_(testing.db.dialect.has_sequence( + testing.db, 'user_id_seq', schema=config.test_schema), True) finally: testing.db.execute(schema.DropSequence(s1)) @@ -101,7 +101,7 @@ def test_has_sequence_neg(self): @testing.requires.schemas def test_has_sequence_schemas_neg(self): eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq', - schema="test_schema"), + schema=config.test_schema), False) @testing.requires.schemas @@ -110,19 +110,17 @@ def test_has_sequence_default_not_in_remote(self): testing.db.execute(schema.CreateSequence(s1)) try: eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq', - schema="test_schema"), + schema=config.test_schema), False) finally: testing.db.execute(schema.DropSequence(s1)) @testing.requires.schemas def test_has_sequence_remote_not_in_default(self): - s1 = Sequence('user_id_seq', schema="test_schema") + s1 = Sequence('user_id_seq', schema=config.test_schema) testing.db.execute(schema.CreateSequence(s1)) try: eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq'), False) finally: testing.db.execute(schema.DropSequence(s1)) - - diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py index 5d8005f4bd..230aeb1e93 100644 --- a/lib/sqlalchemy/testing/suite/test_types.py +++ b/lib/sqlalchemy/testing/suite/test_types.py @@ -5,7 +5,7 @@ from ..config import requirements from sqlalchemy import Integer, Unicode, UnicodeText, select from sqlalchemy import Date, DateTime, Time, MetaData, String, \ - Text, Numeric, Float, literal, Boolean + Text, Numeric, Float, literal, Boolean from ..schema 
import Table, Column from ... import testing import decimal @@ -20,7 +20,7 @@ def _literal_round_trip(self, type_, input_, output, filter_=None): """test literal rendering """ # for literal, we test the literal render in an INSERT - # into a typed column. we can then SELECT it back as it's + # into a typed column. we can then SELECT it back as its # official type; ideally we'd be able to use CAST here # but MySQL in particular can't CAST fully t = Table('t', self.metadata, Column('x', type_)) @@ -28,9 +28,9 @@ def _literal_round_trip(self, type_, input_, output, filter_=None): for value in input_: ins = t.insert().values(x=literal(value)).compile( - dialect=testing.db.dialect, - compile_kwargs=dict(literal_binds=True) - ) + dialect=testing.db.dialect, + compile_kwargs=dict(literal_binds=True) + ) testing.db.execute(ins) for row in t.select().execute(): @@ -43,17 +43,17 @@ def _literal_round_trip(self, type_, input_, output, filter_=None): class _UnicodeFixture(_LiteralRoundTripFixture): __requires__ = 'unicode_data', - data = u("Alors vous imaginez ma surprise, au lever du jour, "\ - "quand une drôle de petite voix m’a réveillé. Elle "\ - "disait: « S’il vous plaît… dessine-moi un mouton! »") + data = u("Alors vous imaginez ma surprise, au lever du jour, " + "quand une drôle de petite voix m’a réveillé. Elle " + "disait: « S’il vous plaît… dessine-moi un mouton! »") @classmethod def define_tables(cls, metadata): Table('unicode_table', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('unicode_data', cls.datatype), - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('unicode_data', cls.datatype), + ) def test_round_trip(self): unicode_table = self.tables.unicode_table @@ -66,10 +66,10 @@ def test_round_trip(self): ) row = config.db.execute( - select([ - unicode_table.c.unicode_data, - ]) - ).first() + select([ + unicode_table.c.unicode_data, + ]) + ).first() eq_( row, @@ -91,10 +91,10 @@ def test_round_trip_executemany(self): ) rows = config.db.execute( - select([ - unicode_table.c.unicode_data, - ]) - ).fetchall() + select([ + unicode_table.c.unicode_data, + ]) + ).fetchall() eq_( rows, [(self.data, ) for i in range(3)] @@ -110,8 +110,8 @@ def _test_empty_strings(self): {"unicode_data": u('')} ) row = config.db.execute( - select([unicode_table.c.unicode_data]) - ).first() + select([unicode_table.c.unicode_data]) + ).first() eq_(row, (u(''),)) def test_literal(self): @@ -139,6 +139,7 @@ class UnicodeTextTest(_UnicodeFixture, fixtures.TablesTest): def test_empty_strings_text(self): self._test_empty_strings() + class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest): __requires__ = 'text_type', __backend__ = True @@ -146,10 +147,10 @@ class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest): @classmethod def define_tables(cls, metadata): Table('text_table', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('text_data', Text), - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('text_data', Text), + ) def test_text_roundtrip(self): text_table = self.tables.text_table @@ -159,8 +160,8 @@ def test_text_roundtrip(self): {"text_data": 'some text'} ) row = config.db.execute( - select([text_table.c.text_data]) - ).first() + select([text_table.c.text_data]) + ).first() eq_(row, ('some text',)) def test_text_empty_strings(self): @@ -171,8 +172,8 @@ def test_text_empty_strings(self): {"text_data": ''} ) row = config.db.execute( - 
select([text_table.c.text_data]) - ).first() + select([text_table.c.text_data]) + ).first() eq_(row, ('',)) def test_literal(self): @@ -186,6 +187,7 @@ def test_literal_backslashes(self): data = r'backslash one \ backslash two \\ end' self._literal_round_trip(Text, [data], [data]) + class StringTest(_LiteralRoundTripFixture, fixtures.TestBase): __backend__ = True @@ -194,7 +196,7 @@ def test_nolength_string(self): metadata = MetaData() foo = Table('foo', metadata, Column('one', String) - ) + ) foo.create(config.db) foo.drop(config.db) @@ -208,7 +210,7 @@ def test_literal_quoting(self): def test_literal_backslashes(self): data = r'backslash one \ backslash two \\ end' - self._literal_round_trip(Text, [data], [data]) + self._literal_round_trip(String(40), [data], [data]) class _DateFixture(_LiteralRoundTripFixture): @@ -217,10 +219,10 @@ class _DateFixture(_LiteralRoundTripFixture): @classmethod def define_tables(cls, metadata): Table('date_table', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('date_data', cls.datatype), - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('date_data', cls.datatype), + ) def test_round_trip(self): date_table = self.tables.date_table @@ -231,10 +233,10 @@ def test_round_trip(self): ) row = config.db.execute( - select([ - date_table.c.date_data, - ]) - ).first() + select([ + date_table.c.date_data, + ]) + ).first() compare = self.compare or self.data eq_(row, @@ -250,10 +252,10 @@ def test_null(self): ) row = config.db.execute( - select([ - date_table.c.date_data, - ]) - ).first() + select([ + date_table.c.date_data, + ]) + ).first() eq_(row, (None,)) @testing.requires.datetime_literals @@ -262,7 +264,6 @@ def test_literal(self): self._literal_round_trip(self.datatype, [self.data], [compare]) - class DateTimeTest(_DateFixture, fixtures.TablesTest): __requires__ = 'datetime', __backend__ = True @@ -322,19 +323,22 @@ class DateHistoricTest(_DateFixture, fixtures.TablesTest): class IntegerTest(_LiteralRoundTripFixture, fixtures.TestBase): __backend__ = True + def test_literal(self): self._literal_round_trip(Integer, [5], [5]) + class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase): __backend__ = True @testing.emits_warning(r".*does \*not\* support Decimal objects natively") @testing.provide_metadata - def _do_test(self, type_, input_, output, filter_=None, check_scale=False): + def _do_test(self, type_, input_, output, + filter_=None, check_scale=False): metadata = self.metadata t = Table('t', metadata, Column('x', type_)) t.create() - t.insert().execute([{'x':x} for x in input_]) + t.insert().execute([{'x': x} for x in input_]) result = set([row[0] for row in t.select().execute()]) output = set(output) @@ -348,7 +352,6 @@ def _do_test(self, type_, input_, output, filter_=None, check_scale=False): [str(x) for x in output], ) - @testing.emits_warning(r".*does \*not\* support Decimal objects natively") def test_render_literal_numeric(self): self._literal_round_trip( @@ -369,17 +372,16 @@ def test_render_literal_float(self): self._literal_round_trip( Float(4), [15.7563, decimal.Decimal("15.7563")], - [15.7563,], + [15.7563, ], filter_=lambda n: n is not None and round(n, 5) or None ) - @testing.requires.precision_generic_float_type def test_float_custom_scale(self): self._do_test( Float(None, decimal_return_scale=7, asdecimal=True), [15.7563827, decimal.Decimal("15.7563827")], - [decimal.Decimal("15.7563827"),], + [decimal.Decimal("15.7563827"), ], check_scale=True ) @@ 
-421,7 +423,6 @@ def test_float_as_decimal(self): [decimal.Decimal("15.7563"), None], ) - def test_float_as_float(self): self._do_test( Float(precision=8), @@ -430,7 +431,6 @@ def test_float_as_float(self): filter_=lambda n: n is not None and round(n, 5) or None ) - @testing.requires.precision_numerics_general def test_precision_decimal(self): numbers = set([ @@ -445,7 +445,6 @@ def test_precision_decimal(self): numbers, ) - @testing.requires.precision_numerics_enotation_large def test_enotation_decimal(self): """test exceedingly small decimals. @@ -475,7 +474,6 @@ def test_enotation_decimal(self): numbers ) - @testing.requires.precision_numerics_enotation_large def test_enotation_decimal_large(self): """test exceedingly large decimals. @@ -526,10 +524,10 @@ class BooleanTest(_LiteralRoundTripFixture, fixtures.TablesTest): @classmethod def define_tables(cls, metadata): Table('boolean_table', metadata, - Column('id', Integer, primary_key=True, autoincrement=False), - Column('value', Boolean), - Column('unconstrained_value', Boolean(create_constraint=False)), - ) + Column('id', Integer, primary_key=True, autoincrement=False), + Column('value', Boolean), + Column('unconstrained_value', Boolean(create_constraint=False)), + ) def test_render_literal_bool(self): self._literal_round_trip( @@ -551,11 +549,11 @@ def test_round_trip(self): ) row = config.db.execute( - select([ - boolean_table.c.value, - boolean_table.c.unconstrained_value - ]) - ).first() + select([ + boolean_table.c.value, + boolean_table.c.unconstrained_value + ]) + ).first() eq_( row, @@ -576,11 +574,11 @@ def test_null(self): ) row = config.db.execute( - select([ - boolean_table.c.value, - boolean_table.c.unconstrained_value - ]) - ).first() + select([ + boolean_table.c.value, + boolean_table.c.unconstrained_value + ]) + ).first() eq_( row, @@ -588,11 +586,9 @@ def test_null(self): ) - - __all__ = ('UnicodeVarcharTest', 'UnicodeTextTest', - 'DateTest', 'DateTimeTest', 'TextTest', - 'NumericTest', 'IntegerTest', - 'DateTimeHistoricTest', 'DateTimeCoercedToDateTimeTest', - 'TimeMicrosecondsTest', 'TimeTest', 'DateTimeMicrosecondsTest', - 'DateHistoricTest', 'StringTest', 'BooleanTest') + 'DateTest', 'DateTimeTest', 'TextTest', + 'NumericTest', 'IntegerTest', + 'DateTimeHistoricTest', 'DateTimeCoercedToDateTimeTest', + 'TimeMicrosecondsTest', 'TimeTest', 'DateTimeMicrosecondsTest', + 'DateHistoricTest', 'StringTest', 'BooleanTest') diff --git a/lib/sqlalchemy/testing/suite/test_update_delete.py b/lib/sqlalchemy/testing/suite/test_update_delete.py index 88dc95355f..e4c61e74a4 100644 --- a/lib/sqlalchemy/testing/suite/test_update_delete.py +++ b/lib/sqlalchemy/testing/suite/test_update_delete.py @@ -12,18 +12,18 @@ class SimpleUpdateDeleteTest(fixtures.TablesTest): @classmethod def define_tables(cls, metadata): Table('plain_pk', metadata, - Column('id', Integer, primary_key=True), - Column('data', String(50)) - ) + Column('id', Integer, primary_key=True), + Column('data', String(50)) + ) @classmethod def insert_data(cls): config.db.execute( cls.tables.plain_pk.insert(), [ - {"id":1, "data":"d1"}, - {"id":2, "data":"d2"}, - {"id":3, "data":"d3"}, + {"id": 1, "data": "d1"}, + {"id": 2, "data": "d2"}, + {"id": 3, "data": "d3"}, ] ) diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py index bde11a356a..754e2ad923 100644 --- a/lib/sqlalchemy/testing/util.py +++ b/lib/sqlalchemy/testing/util.py @@ -1,5 +1,6 @@ # testing/util.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 
2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -60,8 +61,8 @@ def round_decimal(value, prec): # can also use shift() here but that is 2.6 only return (value * decimal.Decimal("1" + "0" * prec) - ).to_integral(decimal.ROUND_FLOOR) / \ - pow(10, prec) + ).to_integral(decimal.ROUND_FLOOR) / \ + pow(10, prec) class RandomSet(set): @@ -137,7 +138,7 @@ def function_named(fn, name): fn.__name__ = name except TypeError: fn = types.FunctionType(fn.__code__, fn.__globals__, name, - fn.__defaults__, fn.__closure__) + fn.__defaults__, fn.__closure__) return fn @@ -146,6 +147,10 @@ def run_as_contextmanager(ctx, fn, *arg, **kw): simulating the behavior of 'with' to support older Python versions. + This is not necessary anymore as we have placed 2.6 + as minimum Python version, however some tests are still using + this structure. + """ obj = ctx.__enter__() @@ -180,6 +185,7 @@ def provide_metadata(fn, *args, **kw): """Provide bound MetaData for a single test, dropping afterwards.""" from . import config + from . import engines from sqlalchemy import schema metadata = schema.MetaData(config.db) @@ -189,17 +195,86 @@ def provide_metadata(fn, *args, **kw): try: return fn(*args, **kw) finally: - metadata.drop_all() + engines.drop_all_tables(metadata, config.db) self.metadata = prev_meta +def force_drop_names(*names): + """Force the given table names to be dropped after test complete, + isolating for foreign key cycles + + """ + from . import config + from sqlalchemy import inspect + + @decorator + def go(fn, *args, **kw): + + try: + return fn(*args, **kw) + finally: + drop_all_tables( + config.db, inspect(config.db), include_names=names) + return go + + class adict(dict): """Dict keys available as attributes. 
Shadows.""" + def __getattribute__(self, key): try: return self[key] except KeyError: return dict.__getattribute__(self, key) - def get_all(self, *keys): + def __call__(self, *keys): return tuple([self[key] for key in keys]) + + get_all = __call__ + + +def drop_all_tables(engine, inspector, schema=None, include_names=None): + from sqlalchemy import Column, Table, Integer, MetaData, \ + ForeignKeyConstraint + from sqlalchemy.schema import DropTable, DropConstraint + + if include_names is not None: + include_names = set(include_names) + + with engine.connect() as conn: + for tname, fkcs in reversed( + inspector.get_sorted_table_and_fkc_names(schema=schema)): + if tname: + if include_names is not None and tname not in include_names: + continue + conn.execute(DropTable( + Table(tname, MetaData(), schema=schema) + )) + elif fkcs: + if not engine.dialect.supports_alter: + continue + for tname, fkc in fkcs: + if include_names is not None and \ + tname not in include_names: + continue + tb = Table( + tname, MetaData(), + Column('x', Integer), + Column('y', Integer), + schema=schema + ) + conn.execute(DropConstraint( + ForeignKeyConstraint( + [tb.c.x], [tb.c.y], name=fkc) + )) + + +def teardown_events(event_cls): + @decorator + def decorate(fn, *arg, **kw): + try: + return fn(*arg, **kw) + finally: + event_cls._clear() + return decorate + diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py index 849b1b5b49..de372dcc4f 100644 --- a/lib/sqlalchemy/testing/warnings.py +++ b/lib/sqlalchemy/testing/warnings.py @@ -1,5 +1,6 @@ # testing/warnings.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,24 +9,11 @@ import warnings from .. import exc as sa_exc -from .. import util -import re +from . import assertions -def testing_warn(msg, stacklevel=3): - """Replaces sqlalchemy.util.warn during tests.""" - filename = "sqlalchemy.testing.warnings" - lineno = 1 - if isinstance(msg, util.string_types): - warnings.warn_explicit(msg, sa_exc.SAWarning, filename, lineno) - else: - warnings.warn_explicit(msg, filename, lineno) - - -def resetwarnings(): - """Reset warning behavior to testing defaults.""" - - util.warn = util.langhelpers.warn = testing_warn +def setup_filters(): + """Set global warning behavior for the test suite.""" warnings.filterwarnings('ignore', category=sa_exc.SAPendingDeprecationWarning) @@ -33,24 +21,14 @@ def resetwarnings(): warnings.filterwarnings('error', category=sa_exc.SAWarning) -def assert_warnings(fn, warnings, regex=False): - """Assert that each of the given warnings are emitted by fn.""" +def assert_warnings(fn, warning_msgs, regex=False): + """Assert that each of the given warnings are emitted by fn. - from .assertions import eq_, emits_warning + Deprecated. Please use assertions.expect_warnings(). 
- canary = [] - orig_warn = util.warn + """ - def capture_warnings(*args, **kw): - orig_warn(*args, **kw) - popwarn = warnings.pop(0) - canary.append(popwarn) - if regex: - assert re.match(popwarn, args[0]) - else: - eq_(args[0], popwarn) - util.warn = util.langhelpers.warn = capture_warnings + with assertions._expect_warnings( + sa_exc.SAWarning, warning_msgs, regex=regex): + return fn() - result = emits_warning()(fn)() - assert canary, "No warning was emitted" - return result diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py index 3994bd4a8b..44ed696d60 100644 --- a/lib/sqlalchemy/types.py +++ b/lib/sqlalchemy/types.py @@ -1,21 +1,22 @@ # types.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""Compatiblity namespace for sqlalchemy.sql.types. +"""Compatibility namespace for sqlalchemy.sql.types. """ __all__ = ['TypeEngine', 'TypeDecorator', 'UserDefinedType', - 'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR', 'TEXT', 'Text', - 'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME', - 'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT', - 'SMALLINT', 'INTEGER', 'DATE', 'TIME', 'String', 'Integer', - 'SmallInteger', 'BigInteger', 'Numeric', 'Float', 'DateTime', - 'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode', - 'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum'] + 'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR', 'TEXT', 'Text', + 'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME', + 'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT', + 'SMALLINT', 'INTEGER', 'DATE', 'TIME', 'String', 'Integer', + 'SmallInteger', 'BigInteger', 'Numeric', 'Float', 'DateTime', + 'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode', + 'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum'] from .sql.type_api import ( adapt_type, @@ -50,6 +51,7 @@ Integer, Interval, LargeBinary, + MatchType, NCHAR, NVARCHAR, NullType, @@ -74,4 +76,3 @@ VARCHAR, _type_map ) - diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py index eba64ed15d..8dcec48c3a 100644 --- a/lib/sqlalchemy/util/__init__.py +++ b/lib/sqlalchemy/util/__init__.py @@ -1,25 +1,27 @@ # util/__init__.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from .compat import callable, cmp, reduce, \ - threading, py3k, py33, py2k, jython, pypy, cpython, win32, \ + threading, py3k, py33, py36, py2k, jython, pypy, cpython, win32, \ pickle, dottedgetter, parse_qsl, namedtuple, next, reraise, \ - raise_from_cause, text_type, string_types, int_types, binary_type, \ + raise_from_cause, text_type, safe_kwarg, string_types, int_types, \ + binary_type, nested, \ quote_plus, with_metaclass, print_, itertools_filterfalse, u, ue, b,\ unquote_plus, unquote, b64decode, b64encode, byte_buffer, itertools_filter,\ - iterbytes, StringIO, inspect_getargspec + iterbytes, StringIO, inspect_getargspec, zip_longest from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \ Properties, OrderedProperties, ImmutableProperties, OrderedDict, \ OrderedSet, IdentitySet, OrderedIdentitySet, column_set, \ column_dict, 
ordered_column_set, populate_column_dict, unique_list, \ UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \ - to_column_set, update_copy, flatten_iterator, \ + to_column_set, update_copy, flatten_iterator, has_intersection, \ LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence, \ - coerce_generator_arg + coerce_generator_arg, lightweight_named_tuple from .langhelpers import iterate_attributes, class_hierarchy, \ portable_instancemethod, unbound_method_to_callable, \ @@ -31,8 +33,10 @@ duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\ classproperty, set_creation_order, warn_exception, warn, NoneType,\ constructor_copy, methods_equivalent, chop_traceback, asint,\ - generic_repr, counter, PluginLoader, hybridmethod, safe_reraise,\ - get_callable_argspec, only_once + generic_repr, counter, PluginLoader, hybridproperty, hybridmethod, \ + safe_reraise,\ + get_callable_argspec, only_once, attrsetter, ellipses_string, \ + warn_limited, map_bits, MemoizedSlots, EnsureKWArgType from .deprecations import warn_deprecated, warn_pending_deprecation, \ deprecated, pending_deprecation, inject_docstring_text diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index c0a24ba4f5..c29b81f6a1 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -1,5 +1,6 @@ # util/_collections.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,14 +10,31 @@ from __future__ import absolute_import import weakref import operator -from .compat import threading, itertools_filterfalse +from .compat import threading, itertools_filterfalse, string_types, \ + binary_types from . import py2k import types +import collections EMPTY_SET = frozenset() -class KeyedTuple(tuple): +class AbstractKeyedTuple(tuple): + __slots__ = () + + def keys(self): + """Return a list of string key names for this :class:`.KeyedTuple`. + + .. seealso:: + + :attr:`.KeyedTuple._fields` + + """ + + return list(self._fields) + + +class KeyedTuple(AbstractKeyedTuple): """``tuple`` subclass that adds labeled names. E.g.:: @@ -55,23 +73,13 @@ class to return rows. def __new__(cls, vals, labels=None): t = tuple.__new__(cls, vals) - t._labels = [] if labels: t.__dict__.update(zip(labels, vals)) - t._labels = labels + else: + labels = [] + t.__dict__['_labels'] = labels return t - def keys(self): - """Return a list of string key names for this :class:`.KeyedTuple`. - - .. seealso:: - - :attr:`.KeyedTuple._fields` - - """ - - return [l for l in self._labels if l is not None] - @property def _fields(self): """Return a tuple of string key names for this :class:`.KeyedTuple`. @@ -85,7 +93,10 @@ def _fields(self): :meth:`.KeyedTuple.keys` """ - return tuple(self.keys()) + return tuple([l for l in self._labels if l is not None]) + + def __setattr__(self, key, value): + raise AttributeError("Can't set attribute: %s" % key) def _asdict(self): """Return the contents of this :class:`.KeyedTuple` as a dictionary. 
@@ -99,6 +110,26 @@ def _asdict(self): return dict((key, self.__dict__[key]) for key in self.keys()) +class _LW(AbstractKeyedTuple): + __slots__ = () + + def __new__(cls, vals): + return tuple.__new__(cls, vals) + + def __reduce__(self): + # for pickling, degrade down to the regular + # KeyedTuple, thus avoiding anonymous class pickling + # difficulties + return KeyedTuple, (list(self), self._real_fields) + + def _asdict(self): + """Return the contents of this :class:`.KeyedTuple` as a dictionary.""" + + d = dict(zip(self._real_fields, self)) + d.pop(None, None) + return d + + class ImmutableContainer(object): def _immutable(self, *arg, **kw): raise TypeError("%s object is immutable" % self.__class__.__name__) @@ -123,8 +154,13 @@ def __reduce__(self): return immutabledict, (dict(self), ) def union(self, d): - if not self: - return immutabledict(d) + if not d: + return self + elif not self: + if isinstance(d, immutabledict): + return d + else: + return immutabledict(d) else: d2 = immutabledict(self) dict.update(d2, d) @@ -137,8 +173,10 @@ def __repr__(self): class Properties(object): """Provide a __getattr__/__setattr__ interface over a dict.""" + __slots__ = '_data', + def __init__(self, data): - self.__dict__['_data'] = data + object.__setattr__(self, '_data', data) def __len__(self): return len(self._data) @@ -158,8 +196,8 @@ def __getitem__(self, key): def __delitem__(self, key): del self._data[key] - def __setattr__(self, key, object): - self._data[key] = object + def __setattr__(self, key, obj): + self._data[key] = obj def __getstate__(self): return {'_data': self.__dict__['_data']} @@ -209,6 +247,9 @@ def clear(self): class OrderedProperties(Properties): """Provide a __getattr__/__setattr__ interface with an OrderedDict as backing store.""" + + __slots__ = () + def __init__(self): Properties.__init__(self, OrderedDict()) @@ -216,10 +257,17 @@ def __init__(self): class ImmutableProperties(ImmutableContainer, Properties): """Provide immutable dict/object attribute to an underlying dictionary.""" + __slots__ = () + class OrderedDict(dict): """A dict that returns keys/values/items in the order they were added.""" + __slots__ = '_list', + + def __reduce__(self): + return OrderedDict, (self.items(),) + def __init__(self, ____sequence=None, **kwargs): self._list = [] if ____sequence is None: @@ -262,16 +310,18 @@ def setdefault(self, key, value): def __iter__(self): return iter(self._list) + def keys(self): + return list(self) - if py2k: - def values(self): - return [self[key] for key in self._list] + def values(self): + return [self[key] for key in self._list] - def keys(self): - return self._list + def items(self): + return [(key, self[key]) for key in self._list] + if py2k: def itervalues(self): - return iter([self[key] for key in self._list]) + return iter(self.values()) def iterkeys(self): return iter(self) @@ -279,39 +329,6 @@ def iterkeys(self): def iteritems(self): return iter(self.items()) - def items(self): - return [(key, self[key]) for key in self._list] - else: - def values(self): - #return (self[key] for key in self) - return (self[key] for key in self._list) - - def keys(self): - #return iter(self) - return iter(self._list) - - def items(self): - #return ((key, self[key]) for key in self) - return ((key, self[key]) for key in self._list) - - _debug_iter = False - if _debug_iter: - # normally disabled to reduce function call - # overhead - def __iter__(self): - len_ = len(self._list) - for item in self._list: - yield item - assert len_ == len(self._list), \ - "Dictionary 
changed size during iteration" - def values(self): - return (self[key] for key in self) - def keys(self): - return iter(self) - def items(self): - return ((key, self[key]) for key in self) - - def __setitem__(self, key, object): if key not in self: try: @@ -344,7 +361,10 @@ def __init__(self, d=None): set.__init__(self) self._list = [] if d is not None: - self.update(d) + self._list = unique_list(d) + set.update(self, self._list) + else: + self._list = [] def add(self, element): if element not in self: @@ -505,7 +525,7 @@ def issubset(self, iterable): if len(self) > len(other): return False for m in itertools_filterfalse(other._members.__contains__, - iter(self._members.keys())): + iter(self._members.keys())): return False return True @@ -526,7 +546,7 @@ def issuperset(self, iterable): return False for m in itertools_filterfalse(self._members.__contains__, - iter(other._members.keys())): + iter(other._members.keys())): return False return True @@ -667,7 +687,7 @@ def __len__(self): def __iter__(self): return (obj for obj in - (ref() for ref in self._storage) if obj is not None) + (ref() for ref in self._storage) if obj is not None) def __getitem__(self, index): try: @@ -718,16 +738,24 @@ def __missing__(self, key): ordered_column_set = OrderedSet populate_column_dict = PopulateDict + +_getters = PopulateDict(operator.itemgetter) + +_property_getters = PopulateDict( + lambda idx: property(operator.itemgetter(idx))) + + def unique_list(seq, hashfunc=None): - seen = {} + seen = set() + seen_add = seen.add if not hashfunc: return [x for x in seq if x not in seen - and not seen.__setitem__(x, True)] + and not seen_add(x)] else: return [x for x in seq if hashfunc(x) not in seen - and not seen.__setitem__(hashfunc(x), True)] + and not seen_add(hashfunc(x))] class UniqueAppender(object): @@ -756,19 +784,37 @@ def append(self, item): def __iter__(self): return iter(self.data) + def coerce_generator_arg(arg): if len(arg) == 1 and isinstance(arg[0], types.GeneratorType): return list(arg[0]) else: return arg + def to_list(x, default=None): if x is None: return default - if not isinstance(x, (list, tuple)): + if not isinstance(x, collections.Iterable) or \ + isinstance(x, string_types + binary_types): return [x] - else: + elif isinstance(x, list): return x + else: + return list(x) + + +def has_intersection(set_, iterable): + """return True if any items of set_ are present in iterable. + + Goes through special effort to ensure __hash__ is not called + on items in iterable that don't support it. + + """ + # TODO: optimize, write in C, etc. + return bool( + set_.intersection([i for i in iterable if i.__hash__]) + ) def to_set(x): @@ -816,16 +862,30 @@ class LRUCache(dict): """Dictionary with 'squishy' removal of least recently used items. + Note that either get() or [] should be used here, but + generally its not safe to do an "in" check first as the dictionary + can change subsequent to that call. 
+ """ + def __init__(self, capacity=100, threshold=.5): self.capacity = capacity self.threshold = threshold self._counter = 0 + self._mutex = threading.Lock() def _inc_counter(self): self._counter += 1 return self._counter + def get(self, key, default=None): + item = dict.get(self, key, default) + if item is not default: + item[2] = self._inc_counter() + return item[1] + else: + return default + def __getitem__(self, key): item = dict.__getitem__(self, key) item[2] = self._inc_counter() @@ -851,18 +911,45 @@ def __setitem__(self, key, value): self._manage_size() def _manage_size(self): - while len(self) > self.capacity + self.capacity * self.threshold: - by_counter = sorted(dict.values(self), - key=operator.itemgetter(2), - reverse=True) - for item in by_counter[self.capacity:]: - try: - del self[item[0]] - except KeyError: - # if we couldnt find a key, most - # likely some other thread broke in - # on us. loop around and try again - break + if not self._mutex.acquire(False): + return + try: + while len(self) > self.capacity + self.capacity * self.threshold: + by_counter = sorted(dict.values(self), + key=operator.itemgetter(2), + reverse=True) + for item in by_counter[self.capacity:]: + try: + del self[item[0]] + except KeyError: + # deleted elsewhere; skip + continue + finally: + self._mutex.release() + + +_lw_tuples = LRUCache(100) + + +def lightweight_named_tuple(name, fields): + hash_ = (name, ) + tuple(fields) + tp_cls = _lw_tuples.get(hash_) + if tp_cls: + return tp_cls + + tp_cls = type( + name, (_LW,), + dict([ + (field, _property_getters[idx]) + for idx, field in enumerate(fields) if field is not None + ] + [('__slots__', ())]) + ) + + tp_cls._real_fields = fields + tp_cls._fields = tuple([f for f in fields if f is not None]) + + _lw_tuples[hash_] = tp_cls + return tp_cls class ScopedRegistry(object): @@ -908,7 +995,7 @@ def has(self): return self.scopefunc() in self.registry def set(self, obj): - """Set the value forthe current scope.""" + """Set the value for the current scope.""" self.registry[self.scopefunc()] = obj @@ -926,6 +1013,7 @@ class ThreadLocalRegistry(ScopedRegistry): variable for storage. 
""" + def __init__(self, createfunc): self.createfunc = createfunc self.registry = threading.local() diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py index f1346406e6..ee4a20f9bf 100644 --- a/lib/sqlalchemy/util/compat.py +++ b/lib/sqlalchemy/util/compat.py @@ -1,5 +1,6 @@ # util/compat.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -13,10 +14,12 @@ except ImportError: import dummy_threading as threading +py36 = sys.version_info >= (3, 6) py33 = sys.version_info >= (3, 3) py32 = sys.version_info >= (3, 2) py3k = sys.version_info >= (3, 0) py2k = sys.version_info < (3, 0) +py265 = sys.version_info >= (2, 6, 5) jython = sys.platform.startswith('java') pypy = hasattr(sys, 'pypy_version_info') win32 = sys.platform.startswith('win') @@ -33,14 +36,21 @@ except ImportError: import pickle +# work around http://bugs.python.org/issue2646 +if py265: + safe_kwarg = lambda arg: arg +else: + safe_kwarg = str + ArgSpec = collections.namedtuple("ArgSpec", - ["args", "varargs", "keywords", "defaults"]) + ["args", "varargs", "keywords", "defaults"]) if py3k: import builtins from inspect import getfullargspec as inspect_getfullargspec - from urllib.parse import quote_plus, unquote_plus, parse_qsl, quote, unquote + from urllib.parse import (quote_plus, unquote_plus, + parse_qsl, quote, unquote) import configparser from io import StringIO @@ -48,10 +58,11 @@ def inspect_getargspec(func): return ArgSpec( - *inspect_getfullargspec(func)[0:4] - ) + *inspect_getfullargspec(func)[0:4] + ) string_types = str, + binary_types = bytes, binary_type = bytes text_type = str int_types = int, @@ -85,10 +96,13 @@ def cmp(a, b): itertools_filterfalse = itertools.filterfalse itertools_filter = filter itertools_imap = map + from itertools import zip_longest import base64 + def b64encode(x): return base64.b64encode(x).decode('ascii') + def b64decode(x): return base64.b64decode(x.encode('ascii')) @@ -102,9 +116,11 @@ def b64decode(x): from cStringIO import StringIO as byte_buffer string_types = basestring, + binary_types = bytes, binary_type = str text_type = unicode int_types = int, long + def iterbytes(buf): return (ord(byte) for byte in buf) @@ -147,6 +163,7 @@ def print_(*args, **kwargs): itertools_filterfalse = itertools.ifilterfalse itertools_filter = itertools.ifilter itertools_imap = itertools.imap + from itertools import izip_longest as zip_longest import time @@ -162,27 +179,27 @@ def print_(*args, **kwargs): if py3k: def reraise(tp, value, tb=None, cause=None): if cause is not None: + assert cause is not value, "Same cause emitted" value.__cause__ = cause if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value - def raise_from_cause(exception, exc_info=None): - if exc_info is None: - exc_info = sys.exc_info() - exc_type, exc_value, exc_tb = exc_info - reraise(type(exception), exception, tb=exc_tb, cause=exc_value) else: + # not as nice as that of Py3K, but at least preserves + # the code line where the issue occurred exec("def reraise(tp, value, tb=None, cause=None):\n" - " raise tp, value, tb\n") + " if cause is not None:\n" + " assert cause is not value, 'Same cause emitted'\n" + " raise tp, value, tb\n") - def raise_from_cause(exception, exc_info=None): - # not as nice as that of Py3K, but at least preserves - # the code line where the issue 
occurred - if exc_info is None: - exc_info = sys.exc_info() - exc_type, exc_value, exc_tb = exc_info - reraise(type(exception), exception, tb=exc_tb) + +def raise_from_cause(exception, exc_info=None): + if exc_info is None: + exc_info = sys.exc_info() + exc_type, exc_value, exc_tb = exc_info + cause = exc_value if exc_value is not exception else None + reraise(type(exception), exception, tb=exc_tb, cause=cause) if py3k: exec_ = getattr(builtins, 'exec') @@ -206,6 +223,7 @@ def with_metaclass(meta, *bases): class metaclass(meta): __call__ = type.__call__ __init__ = type.__init__ + def __new__(cls, name, this_bases, d): if this_bases is None: return type.__new__(cls, name, (), d) @@ -213,3 +231,35 @@ def __new__(cls, name, this_bases, d): return metaclass('temporary_class', None, {}) +from contextlib import contextmanager + +try: + from contextlib import nested +except ImportError: + # removed in py3k, credit to mitsuhiko for + # workaround + + @contextmanager + def nested(*managers): + exits = [] + vars = [] + exc = (None, None, None) + try: + for mgr in managers: + exit = mgr.__exit__ + enter = mgr.__enter__ + vars.append(enter()) + exits.append(exit) + yield vars + except: + exc = sys.exc_info() + finally: + while exits: + exit = exits.pop() + try: + if exit(*exc): + exc = (None, None, None) + except: + exc = sys.exc_info() + if exc != (None, None, None): + reraise(exc[0], exc[1], exc[2]) diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py index c8854dc32b..12fa63602a 100644 --- a/lib/sqlalchemy/util/deprecations.py +++ b/lib/sqlalchemy/util/deprecations.py @@ -1,5 +1,6 @@ # util/deprecations.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -37,7 +38,7 @@ def deprecated(version, message=None, add_deprecation_to_docstring=True): if add_deprecation_to_docstring: header = ".. deprecated:: %s %s" % \ - (version, (message or '')) + (version, (message or '')) else: header = None @@ -71,7 +72,7 @@ def pending_deprecation(version, message=None, if add_deprecation_to_docstring: header = ".. 
deprecated:: %s (pending) %s" % \ - (version, (message or '')) + (version, (message or '')) else: header = None @@ -101,7 +102,7 @@ def _decorate_with_warning(func, wtype, message, docstring_header=None): @decorator def warned(fn, *args, **kwargs): - warnings.warn(wtype(message), stacklevel=3) + warnings.warn(message, wtype, stacklevel=3) return fn(*args, **kwargs) doc = func.__doc__ is not None and func.__doc__ or '' @@ -116,6 +117,7 @@ def warned(fn, *args, **kwargs): import textwrap + def _dedent_docstring(text): split_text = text.split("\n", 1) if len(split_text) == 1: @@ -127,6 +129,7 @@ def _dedent_docstring(text): else: return textwrap.dedent(text) + def inject_docstring_text(doctext, injecttext, pos): doctext = _dedent_docstring(doctext or "") lines = doctext.split('\n') diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 8a1164e77f..0318d1e049 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -1,5 +1,6 @@ # util/langhelpers.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -21,6 +22,7 @@ from . import compat from . import _collections + def md5_hex(x): if compat.py3k: x = x.encode('utf-8') @@ -28,6 +30,7 @@ def md5_hex(x): m.update(x) return m.hexdigest() + class safe_reraise(object): """Reraise an exception after invoking some handler code. @@ -56,9 +59,17 @@ def __exit__(self, type_, value, traceback): self._exc_info = None # remove potential circular references compat.reraise(exc_type, exc_value, exc_tb) else: + if not compat.py3k and self._exc_info and self._exc_info[1]: + # emulate Py3K's behavior of telling us when an exception + # occurs in an exception handler. + warn( + "An exception has occurred during handling of a " + "previous exception. The previous exception " + "is:\n %s %s\n" % (self._exc_info[0], self._exc_info[1])) self._exc_info = None # remove potential circular references compat.reraise(type_, value, traceback) + def decode_slice(slc): """decode a slice object as sent to __getitem__. @@ -72,12 +83,13 @@ def decode_slice(slc): ret.append(x) return tuple(ret) + def _unique_symbols(used, *bases): used = set(used) for base in bases: pool = itertools.chain((base,), compat.itertools_imap(lambda i: base + str(i), - range(1000))) + range(1000))) for sym in pool: if sym not in used: used.add(sym) @@ -87,6 +99,15 @@ def _unique_symbols(used, *bases): raise NameError("exhausted namespace for symbol base %s" % base) +def map_bits(fn, n): + """Call the given function given each nonzero bit from n.""" + + while n: + b = n & (~n + 1) + yield fn(b) + n ^= b + + def decorator(target): """A signature-matching decorator factory.""" @@ -105,17 +126,19 @@ def %(name)s(%(args)s): return %(target)s(%(fn)s, %(apply_kw)s) """ % metadata decorated = _exec_code_in_env(code, - {targ_name: target, fn_name: fn}, - fn.__name__) + {targ_name: target, fn_name: fn}, + fn.__name__) decorated.__defaults__ = getattr(fn, 'im_func', fn).__defaults__ decorated.__wrapped__ = fn return update_wrapper(decorated, fn) return update_wrapper(decorate, target) + def _exec_code_in_env(code, env, fn_name): exec(code, env) return env[fn_name] + def public_factory(target, location): """Produce a wrapping function for the given cls or classmethod. 
@@ -127,13 +150,14 @@ class can serve as documentation for the function. fn = target.__init__ callable_ = target doc = "Construct a new :class:`.%s` object. \n\n"\ - "This constructor is mirrored as a public API function; see :func:`~%s` "\ - "for a full usage and argument description." % ( - target.__name__, location, ) + "This constructor is mirrored as a public API function; "\ + "see :func:`~%s` "\ + "for a full usage and argument description." % ( + target.__name__, location, ) else: fn = callable_ = target doc = "This function is mirrored; see :func:`~%s` "\ - "for a description of arguments." % location + "for a description of arguments." % location location_name = location.split(".")[-1] spec = compat.inspect_getfullargspec(fn) @@ -148,6 +172,7 @@ def %(name)s(%(args)s): exec(code, env) decorated = env[location_name] decorated.__doc__ = fn.__doc__ + decorated.__module__ = "sqlalchemy" + location.rsplit(".", 1)[0] if compat.py2k or hasattr(fn, '__func__'): fn.__func__.__doc__ = doc else: @@ -178,13 +203,13 @@ def load(self, name): pass else: for impl in pkg_resources.iter_entry_points( - self.group, name): + self.group, name): self.impls[name] = impl.load return impl.load() raise exc.NoSuchModuleError( - "Can't load plugin: %s:%s" % - (self.group, name)) + "Can't load plugin: %s:%s" % + (self.group, name)) def register(self, name, modulepath, objname): def load(): @@ -199,15 +224,15 @@ def get_cls_kwargs(cls, _set=None): """Return the full set of inherited kwargs for the given `cls`. Probes a class's __init__ method, collecting all named arguments. If the - __init__ defines a \**kwargs catch-all, then the constructor is presumed to - pass along unrecognized keywords to it's base classes, and the collection - process is repeated recursively on each of the bases. + __init__ defines a \**kwargs catch-all, then the constructor is presumed + to pass along unrecognized keywords to its base classes, and the + collection process is repeated recursively on each of the bases. Uses a subset of inspect.getargspec() to cut down on method overhead. No anonymous tuple arguments please ! """ - toplevel = _set == None + toplevel = _set is None if toplevel: _set = set() @@ -232,7 +257,6 @@ def get_cls_kwargs(cls, _set=None): return _set - try: # TODO: who doesn't have this constant? from inspect import CO_VARKEYWORDS @@ -261,6 +285,7 @@ def get_func_kwargs(func): return compat.inspect_getargspec(func)[0] + def get_callable_argspec(fn, no_self=False, _is_init=False): """Return the argument signature for any callable. 
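To make the reworded ``get_cls_kwargs`` docstring above concrete: when a constructor declares a ``**kwargs`` catch-all, the helper also probes the base classes and collects their named arguments. A rough sketch with made-up classes (the ``sqlalchemy.util.langhelpers`` import path is the one used by the vendored library; the shown result is approximate):

    from sqlalchemy.util.langhelpers import get_cls_kwargs

    class Base(object):
        def __init__(self, color=None, **kw):
            self.color = color

    class Child(Base):
        def __init__(self, size=None, **kw):
            super(Child, self).__init__(**kw)
            self.size = size

    # Child.__init__ has a **kw catch-all, so Base is probed as well and both
    # constructors' named arguments are collected ('self' is discarded).
    print(sorted(get_cls_kwargs(Child)))  # expected: ['color', 'size']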
@@ -276,18 +301,19 @@ def get_callable_argspec(fn, no_self=False, _is_init=False): if _is_init and no_self: spec = compat.inspect_getargspec(fn) return compat.ArgSpec(spec.args[1:], spec.varargs, - spec.keywords, spec.defaults) + spec.keywords, spec.defaults) else: return compat.inspect_getargspec(fn) elif inspect.ismethod(fn): if no_self and (_is_init or fn.__self__): spec = compat.inspect_getargspec(fn.__func__) return compat.ArgSpec(spec.args[1:], spec.varargs, - spec.keywords, spec.defaults) + spec.keywords, spec.defaults) else: return compat.inspect_getargspec(fn.__func__) elif inspect.isclass(fn): - return get_callable_argspec(fn.__init__, no_self=no_self, _is_init=True) + return get_callable_argspec( + fn.__init__, no_self=no_self, _is_init=True) elif hasattr(fn, '__func__'): return compat.inspect_getargspec(fn.__func__) elif hasattr(fn, '__call__'): @@ -298,6 +324,7 @@ def get_callable_argspec(fn, no_self=False, _is_init=False): else: raise TypeError("Can't inspect callable: %s" % fn) + def format_argspec_plus(fn, grouped=True): """Returns a dictionary of formatted, introspected function arguments. @@ -345,7 +372,7 @@ def format_argspec_plus(fn, grouped=True): if compat.py3k: apply_pos = inspect.formatargspec(spec[0], spec[1], - spec[2], None, spec[4]) + spec[2], None, spec[4]) num_defaults = 0 if spec[3]: num_defaults += len(spec[3]) @@ -365,7 +392,7 @@ def format_argspec_plus(fn, grouped=True): defaulted_vals = () apply_kw = inspect.formatargspec(name_args, spec[1], spec[2], - defaulted_vals, + defaulted_vals, formatvalue=lambda x: '=' + x) if grouped: return dict(args=args, self_arg=self_arg, @@ -392,7 +419,7 @@ def format_argspec_init(method, grouped=True): return format_argspec_plus(method, grouped=grouped) except TypeError: args = (grouped and '(self, *args, **kwargs)' - or 'self, *args, **kwargs') + or 'self, *args, **kwargs') return dict(self_arg='self', args=args, apply_pos=args, apply_kw=args) @@ -406,7 +433,7 @@ def getargspec_init(method): """ try: - return inspect.getargspec(method) + return compat.inspect_getargspec(method) except TypeError: if method is object.__init__: return (['self'], None, None, None) @@ -426,7 +453,7 @@ def unbound_method_to_callable(func_or_cls): return func_or_cls -def generic_repr(obj, additional_kw=(), to_inspect=None): +def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()): """Produce a __repr__() based on direct association of the __init__() specification vs. same-named attributes present. 
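A small sketch of what ``generic_repr`` and its new ``omit_kwarg`` parameter do (hypothetical class, expected output is approximate): the helper inspects ``__init__``, echoes the positional arguments, adds any keyword argument whose current value differs from its declared default, and now skips any keyword named in ``omit_kwarg``.

    from sqlalchemy.util.langhelpers import generic_repr

    class Widget(object):
        def __init__(self, name, size=10, secret=None):
            self.name = name
            self.size = size
            self.secret = secret

        def __repr__(self):
            # never echo 'secret' back, even when it was set
            return generic_repr(self, omit_kwarg=('secret',))

    print(repr(Widget('knob', size=3, secret='hunter2')))
    # roughly: Widget('knob', size=3)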
@@ -444,7 +471,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None): for i, insp in enumerate(to_inspect): try: (_args, _vargs, vkw, defaults) = \ - inspect.getargspec(insp.__init__) + compat.inspect_getargspec(insp.__init__) except TypeError: continue else: @@ -464,8 +491,8 @@ def generic_repr(obj, additional_kw=(), to_inspect=None): if default_len: kw_args.update([ (arg, default) - for arg, default - in zip(_args[-default_len:], defaults) + for arg, default + in zip(_args[-default_len:], defaults) ]) output = [] @@ -475,11 +502,13 @@ def generic_repr(obj, additional_kw=(), to_inspect=None): output.extend([repr(val) for val in getattr(obj, vargs)]) for arg, defval in kw_args.items(): + if arg in omit_kwarg: + continue try: val = getattr(obj, arg, missing) if val is not missing and val != defval: output.append('%s=%r' % (arg, val)) - except: + except Exception: pass if additional_kw: @@ -488,7 +517,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None): val = getattr(obj, arg, missing) if val is not missing and val != defval: output.append('%s=%r' % (arg, val)) - except: + except Exception: pass return "%s(%s)" % (obj.__class__.__name__, ", ".join(output)) @@ -499,6 +528,16 @@ class portable_instancemethod(object): to produce a serializable callable. """ + + __slots__ = 'target', 'name', '__weakref__' + + def __getstate__(self): + return {'target': self.target, 'name': self.name} + + def __setstate__(self, state): + self.target = state['target'] + self.name = state['name'] + def __init__(self, meth): self.target = meth.__self__ self.name = meth.__name__ @@ -532,7 +571,7 @@ class systemwide that derives from object. if isinstance(c, types.ClassType): continue bases = (_ for _ in c.__bases__ - if _ not in hier and not isinstance(_, types.ClassType)) + if _ not in hier and not isinstance(_, types.ClassType)) else: bases = (_ for _ in c.__bases__ if _ not in hier) @@ -544,7 +583,8 @@ class systemwide that derives from object. if c.__module__ == 'builtins' or not hasattr(c, '__subclasses__'): continue else: - if c.__module__ == '__builtin__' or not hasattr(c, '__subclasses__'): + if c.__module__ == '__builtin__' or not hasattr( + c, '__subclasses__'): continue for s in [_ for _ in c.__subclasses__() if _ not in hier]: @@ -592,7 +632,7 @@ def monkeypatch_proxied_specials(into_cls, from_cls, skip=None, only=None, except AttributeError: continue try: - spec = inspect.getargspec(fn) + spec = compat.inspect_getargspec(fn) fn_args = inspect.formatargspec(spec[0]) d_args = inspect.formatargspec(spec[0][1:]) except TypeError: @@ -614,7 +654,8 @@ def monkeypatch_proxied_specials(into_cls, from_cls, skip=None, only=None, def methods_equivalent(meth1, meth2): """Return True if the two methods are the same implementation.""" - return getattr(meth1, '__func__', meth1) is getattr(meth2, '__func__', meth2) + return getattr(meth1, '__func__', meth1) is getattr( + meth2, '__func__', meth2) def as_interface(obj, cls=None, methods=None, required=None): @@ -672,7 +713,7 @@ def as_interface(obj, cls=None, methods=None, required=None): return obj # No dict duck typing here. 
- if not type(obj) is dict: + if not isinstance(obj, dict): qualifier = complies is operator.gt and 'any of' or 'all of' raise TypeError("%r does not implement %s: %s" % ( obj, qualifier, ', '.join(interface))) @@ -701,6 +742,7 @@ class AnonymousInterface(object): class memoized_property(object): """A read-only @property that is only evaluated once.""" + def __init__(self, fget, doc=None): self.fget = fget self.__doc__ = doc or fget.__doc__ @@ -720,7 +762,7 @@ def reset(cls, obj, name): obj.__dict__.pop(name, None) -class memoized_instancemethod(object): +def memoized_instancemethod(fn): """Decorate a method memoize its return value. Best applied to no-arg methods: memoization is not sensitive to @@ -728,26 +770,15 @@ class memoized_instancemethod(object): called with different arguments. """ - def __init__(self, fget, doc=None): - self.fget = fget - self.__doc__ = doc or fget.__doc__ - self.__name__ = fget.__name__ - - def __get__(self, obj, cls): - if obj is None: - return self - def oneshot(*args, **kw): - result = self.fget(obj, *args, **kw) - memo = lambda *a, **kw: result - memo.__name__ = self.__name__ - memo.__doc__ = self.__doc__ - obj.__dict__[self.__name__] = memo - return result - - oneshot.__name__ = self.__name__ - oneshot.__doc__ = self.__doc__ - return oneshot + def oneshot(self, *args, **kw): + result = fn(self, *args, **kw) + memo = lambda *a, **kw: result + memo.__name__ = fn.__name__ + memo.__doc__ = fn.__doc__ + self.__dict__[fn.__name__] = memo + return result + return update_wrapper(oneshot, fn) class group_expirable_memoized_property(object): @@ -773,18 +804,55 @@ def method(self, fn): return memoized_instancemethod(fn) +class MemoizedSlots(object): + """Apply memoized items to an object using a __getattr__ scheme. + + This allows the functionality of memoized_property and + memoized_instancemethod to be available to a class using __slots__. + + """ + + __slots__ = () + + def _fallback_getattr(self, key): + raise AttributeError(key) + + def __getattr__(self, key): + if key.startswith('_memoized'): + raise AttributeError(key) + elif hasattr(self, '_memoized_attr_%s' % key): + value = getattr(self, '_memoized_attr_%s' % key)() + setattr(self, key, value) + return value + elif hasattr(self, '_memoized_method_%s' % key): + fn = getattr(self, '_memoized_method_%s' % key) + + def oneshot(*args, **kw): + result = fn(*args, **kw) + memo = lambda *a, **kw: result + memo.__name__ = fn.__name__ + memo.__doc__ = fn.__doc__ + setattr(self, key, memo) + return result + oneshot.__doc__ = fn.__doc__ + return oneshot + else: + return self._fallback_getattr(key) + def dependency_for(modulename): def decorate(obj): # TODO: would be nice to improve on this import silliness, # unfortunately importlib doesn't work that great either tokens = modulename.split(".") - mod = compat.import_(".".join(tokens[0:-1]), globals(), locals(), tokens[-1]) + mod = compat.import_( + ".".join(tokens[0:-1]), globals(), locals(), tokens[-1]) mod = getattr(mod, tokens[-1]) setattr(mod, obj.__name__, obj) return obj return decorate + class dependencies(object): """Apply imported dependencies as arguments to a function. 
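The new ``MemoizedSlots`` base class above routes failed attribute lookups through ``__getattr__`` to ``_memoized_attr_<name>`` / ``_memoized_method_<name>`` hooks and caches the result with ``setattr``, which is how memoization becomes available to ``__slots__`` classes that cannot use ``memoized_property``. A minimal usage sketch (hypothetical class, for illustration only; the memoized name itself must be declared as a slot so the cached value has somewhere to live):

    from sqlalchemy.util.langhelpers import MemoizedSlots

    class Point(MemoizedSlots):
        __slots__ = ('x', 'y', 'length')

        def __init__(self, x, y):
            self.x = x
            self.y = y

        def _memoized_attr_length(self):
            print("computing length once")
            return (self.x ** 2 + self.y ** 2) ** 0.5

    p = Point(3, 4)
    print(p.length)   # unset slot -> __getattr__ computes 5.0 and stores it
    print(p.length)   # now reads the filled slot directly, no recomputation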
@@ -808,7 +876,7 @@ def __init__(self, *deps): for dep in deps: tokens = dep.split(".") self.import_deps.append( - dependencies._importlater( + dependencies._importlater( ".".join(tokens[0:-1]), tokens[-1] ) @@ -833,8 +901,8 @@ def __call__(self, fn): outer_spec = format_argspec_plus(spec, grouped=False) code = 'lambda %(args)s: fn(%(apply_kw)s)' % { - "args": outer_spec['args'], - "apply_kw": inner_spec['apply_kw'] + "args": outer_spec['args'], + "apply_kw": inner_spec['apply_kw'] } decorated = eval(code, locals()) @@ -868,7 +936,6 @@ def __init__(self, path, addtl): self._il_addtl = addtl dependencies._unresolved.add(self) - @property def _full_path(self): return self._il_path + "." + self._il_addtl @@ -877,29 +944,29 @@ def _full_path(self): def module(self): if self in dependencies._unresolved: raise ImportError( - "importlater.resolve_all() hasn't " - "been called (this is %s %s)" - % (self._il_path, self._il_addtl)) + "importlater.resolve_all() hasn't " + "been called (this is %s %s)" + % (self._il_path, self._il_addtl)) return getattr(self._initial_import, self._il_addtl) def _resolve(self): dependencies._unresolved.discard(self) self._initial_import = compat.import_( - self._il_path, globals(), locals(), - [self._il_addtl]) + self._il_path, globals(), locals(), + [self._il_addtl]) def __getattr__(self, key): if key == 'module': raise ImportError("Could not resolve module %s" - % self._full_path) + % self._full_path) try: attr = getattr(self.module, key) except AttributeError: raise AttributeError( - "Module %s has no attribute '%s'" % - (self._full_path, key) - ) + "Module %s has no attribute '%s'" % + (self._full_path, key) + ) self.__dict__[key] = attr return attr @@ -918,7 +985,7 @@ def asbool(obj): def bool_or_str(*text): - """Return a callable that will evaulate a string as + """Return a callable that will evaluate a string as boolean, or one of a set of "alternate" string values. """ @@ -944,14 +1011,14 @@ def coerce_kw_type(kw, key, type_, flexi_bool=True): when coercing to boolean. """ - if key in kw and type(kw[key]) is not type_ and kw[key] is not None: + if key in kw and not isinstance(kw[key], type_) and kw[key] is not None: if type_ is bool and flexi_bool: kw[key] = asbool(kw[key]) else: kw[key] = type_(kw[key]) -def constructor_copy(obj, cls, **kw): +def constructor_copy(obj, cls, *args, **kw): """Instantiate cls using the __dict__ of obj as constructor arguments. Uses inspect to match the named arguments of ``cls``. @@ -960,7 +1027,7 @@ def constructor_copy(obj, cls, **kw): names = get_cls_kwargs(cls) kw.update((k, obj.__dict__[k]) for k in names if k in obj.__dict__) - return cls(**kw) + return cls(*args, **kw) def counter(): @@ -1074,9 +1141,23 @@ def __get__(desc, self, cls): return desc.fget(cls) +class hybridproperty(object): + def __init__(self, func): + self.func = func + + def __get__(self, instance, owner): + if instance is None: + clsval = self.func(owner) + clsval.__doc__ = self.func.__doc__ + return clsval + else: + return self.func(instance) + + class hybridmethod(object): """Decorate a function as cls- or instance- level.""" - def __init__(self, func, expr=None): + + def __init__(self, func): self.func = func def __get__(self, instance, owner): @@ -1168,28 +1249,62 @@ def warn_exception(func, *args, **kwargs): """ try: return func(*args, **kwargs) - except: + except Exception: warn("%s('%s') ignored" % sys.exc_info()[0:2]) -def warn(msg, stacklevel=3): +def ellipses_string(value, len_=25): + try: + if len(value) > len_: + return "%s..." 
% value[0:len_] + else: + return value + except TypeError: + return value + + +class _hash_limit_string(compat.text_type): + """A string subclass that can only be hashed on a maximum amount + of unique values. + + This is used for warnings so that we can send out parameterized warnings + without the __warningregistry__ of the module, or the non-overridable + "once" registry within warnings.py, overloading memory, + + + """ + def __new__(cls, value, num, args): + interpolated = (value % args) + \ + (" (this warning may be suppressed after %d occurrences)" % num) + self = super(_hash_limit_string, cls).__new__(cls, interpolated) + self._hash = hash("%s_%d" % (value, hash(interpolated) % num)) + return self + + def __hash__(self): + return self._hash + + def __eq__(self, other): + return hash(self) == hash(other) + + +def warn(msg): """Issue a warning. If msg is a string, :class:`.exc.SAWarning` is used as the category. - .. note:: + """ + warnings.warn(msg, exc.SAWarning, stacklevel=2) + - This function is swapped out when the test suite - runs, with a compatible version that uses - warnings.warn_explicit, so that the warnings registry can - be controlled. +def warn_limited(msg, args): + """Issue a warning with a paramterized string, limiting the number + of registrations. """ - if isinstance(msg, compat.string_types): - warnings.warn(msg, exc.SAWarning, stacklevel=stacklevel) - else: - warnings.warn(msg, stacklevel=stacklevel) + if args: + msg = _hash_limit_string(msg, 10, args) + warnings.warn(msg, exc.SAWarning, stacklevel=2) def only_once(fn): @@ -1197,6 +1312,7 @@ def only_once(fn): once.""" once = [fn] + def go(*arg, **kw): if once: once_fn = once.pop() @@ -1208,6 +1324,7 @@ def go(*arg, **kw): _SQLA_RE = re.compile(r'sqlalchemy/([a-z_]+/){0,2}[a-z_]+\.py') _UNITTEST_RE = re.compile(r'unit(?:2|test2?/)') + def chop_traceback(tb, exclude_prefix=_UNITTEST_RE, exclude_suffix=_SQLA_RE): """Chop extraneous lines off beginning and end of a traceback. @@ -1215,7 +1332,8 @@ def chop_traceback(tb, exclude_prefix=_UNITTEST_RE, exclude_suffix=_SQLA_RE): a list of traceback lines as returned by ``traceback.format_stack()`` :param exclude_prefix: - a regular expression object matching lines to skip at beginning of ``tb`` + a regular expression object matching lines to skip at beginning of + ``tb`` :param exclude_suffix: a regular expression object matching lines to skip at end of ``tb`` @@ -1229,3 +1347,38 @@ def chop_traceback(tb, exclude_prefix=_UNITTEST_RE, exclude_suffix=_SQLA_RE): return tb[start:end + 1] NoneType = type(None) + + +def attrsetter(attrname): + code = \ + "def set(obj, value):"\ + " obj.%s = value" % attrname + env = locals().copy() + exec(code, env) + return env['set'] + + +class EnsureKWArgType(type): + """Apply translation of functions to accept **kw arguments if they + don't already. 
+ + """ + def __init__(cls, clsname, bases, clsdict): + fn_reg = cls.ensure_kwarg + if fn_reg: + for key in clsdict: + m = re.match(fn_reg, key) + if m: + fn = clsdict[key] + spec = compat.inspect_getargspec(fn) + if not spec.keywords: + clsdict[key] = wrapped = cls._wrap_w_kw(fn) + setattr(cls, key, wrapped) + super(EnsureKWArgType, cls).__init__(clsname, bases, clsdict) + + def _wrap_w_kw(self, fn): + + def wrap(*arg, **kw): + return fn(*arg) + return update_wrapper(wrap, fn) + diff --git a/lib/sqlalchemy/util/queue.py b/lib/sqlalchemy/util/queue.py index c98aa7fdab..221347158b 100644 --- a/lib/sqlalchemy/util/queue.py +++ b/lib/sqlalchemy/util/queue.py @@ -1,5 +1,6 @@ # util/queue.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -57,7 +58,6 @@ def __init__(self, maxsize=0): # a thread waiting to put is notified then. self.not_full = threading.Condition(self.mutex) - def qsize(self): """Return the approximate size of the queue (not reliable!).""" diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py index fe7e768965..5c5c54c53b 100644 --- a/lib/sqlalchemy/util/topological.py +++ b/lib/sqlalchemy/util/topological.py @@ -1,5 +1,6 @@ # util/topological.py -# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors +# # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,38 +13,41 @@ __all__ = ['sort', 'sort_as_subsets', 'find_cycles'] -def sort_as_subsets(tuples, allitems): +def sort_as_subsets(tuples, allitems, deterministic_order=False): edges = util.defaultdict(set) for parent, child in tuples: edges[child].add(parent) - todo = set(allitems) + Set = util.OrderedSet if deterministic_order else set + + todo = Set(allitems) while todo: - output = set() - for node in list(todo): - if not todo.intersection(edges[node]): + output = Set() + for node in todo: + if todo.isdisjoint(edges[node]): output.add(node) if not output: raise CircularDependencyError( - "Circular dependency detected.", - find_cycles(tuples, allitems), - _gen_edges(edges) - ) + "Circular dependency detected.", + find_cycles(tuples, allitems), + _gen_edges(edges) + ) todo.difference_update(output) yield output -def sort(tuples, allitems): +def sort(tuples, allitems, deterministic_order=False): """sort the given list of items by dependency. 'tuples' is a list of tuples representing a partial ordering. + 'deterministic_order' keeps items within a dependency tier in list order. """ - for set_ in sort_as_subsets(tuples, allitems): + for set_ in sort_as_subsets(tuples, allitems, deterministic_order): for s in set_: yield s @@ -90,7 +94,7 @@ def find_cycles(tuples, allitems): def _gen_edges(edges): return set([ - (right, left) - for left in edges - for right in edges[left] - ]) + (right, left) + for left in edges + for right in edges[left] + ]) diff --git a/lib/stevedore/__init__.py b/lib/stevedore/__init__.py index 93a56b2e54..a471f31ded 100644 --- a/lib/stevedore/__init__.py +++ b/lib/stevedore/__init__.py @@ -20,17 +20,5 @@ # the app we're used from does not set up logging. 
LOG = logging.getLogger('stevedore') -if hasattr(logging, 'NullHandler'): - LOG.addHandler(logging.NullHandler()) -else: - class NullHandler(logging.Handler): - def handle(self, record): - pass +LOG.addHandler(logging.NullHandler()) - def emit(self, record): - pass - - def createLock(self): - self.lock = None - - LOG.addHandler(NullHandler()) diff --git a/lib/stevedore/tests/manager.py b/lib/stevedore/tests/manager.py index 28c3732176..df13af0f55 100644 --- a/lib/stevedore/tests/manager.py +++ b/lib/stevedore/tests/manager.py @@ -3,15 +3,11 @@ Extension manager used only for testing. """ -import logging import warnings from stevedore import extension -LOG = logging.getLogger(__name__) - - class TestExtensionManager(extension.ExtensionManager): """ExtensionManager that is explicitly initialized for tests. diff --git a/lib/tornado/__init__.py b/lib/tornado/__init__.py index 5588295e49..85bacc7e95 100644 --- a/lib/tornado/__init__.py +++ b/lib/tornado/__init__.py @@ -25,5 +25,5 @@ # is zero for an official release, positive for a development branch, # or negative for a release candidate or beta (after the base version # number has been incremented) -version = "4.2.1" -version_info = (4, 2, 1, 0) +version = "4.3" +version_info = (4, 3, 0, 0) diff --git a/lib/tornado/_locale_data.py b/lib/tornado/_locale_data.py new file mode 100644 index 0000000000..47c1df618c --- /dev/null +++ b/lib/tornado/_locale_data.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +# coding: utf-8 +# +# Copyright 2012 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Data used by the tornado.locale module.""" + +from __future__ import absolute_import, division, print_function, with_statement + +# NOTE: This file is supposed to contain unicode strings, which is +# exactly what you'd get with e.g. u"Español" in most python versions. +# However, Python 3.2 doesn't support the u"" syntax, so we use a u() +# function instead. tornado.util.u cannot be used because it doesn't +# support non-ascii characters on python 2. +# When we drop support for Python 3.2, we can remove the parens +# and make these plain unicode strings. 
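A tiny illustration of the workaround described in the note above (not part of the new file): ``tornado.escape.to_unicode`` passes text through unchanged and decodes UTF-8 byte strings, so wrapping each literal in ``u(...)`` yields the same unicode value on Python 2 and Python 3 without relying on the ``u""`` prefix that Python 3.2 rejects.

    # -*- coding: utf-8 -*-
    from tornado.escape import to_unicode as u

    name = u("Español")
    # Python 2: "Español" is a UTF-8 byte string here and is decoded to u'Espa\xf1ol'.
    # Python 3: "Español" is already text and is returned unchanged.
    print(repr(name))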
+from tornado.escape import to_unicode as u + +LOCALE_NAMES = { + "af_ZA": {"name_en": u("Afrikaans"), "name": u("Afrikaans")}, + "am_ET": {"name_en": u("Amharic"), "name": u("አማርኛ")}, + "ar_AR": {"name_en": u("Arabic"), "name": u("العربية")}, + "bg_BG": {"name_en": u("Bulgarian"), "name": u("Български")}, + "bn_IN": {"name_en": u("Bengali"), "name": u("বাংলা")}, + "bs_BA": {"name_en": u("Bosnian"), "name": u("Bosanski")}, + "ca_ES": {"name_en": u("Catalan"), "name": u("Català")}, + "cs_CZ": {"name_en": u("Czech"), "name": u("Čeština")}, + "cy_GB": {"name_en": u("Welsh"), "name": u("Cymraeg")}, + "da_DK": {"name_en": u("Danish"), "name": u("Dansk")}, + "de_DE": {"name_en": u("German"), "name": u("Deutsch")}, + "el_GR": {"name_en": u("Greek"), "name": u("Ελληνικά")}, + "en_GB": {"name_en": u("English (UK)"), "name": u("English (UK)")}, + "en_US": {"name_en": u("English (US)"), "name": u("English (US)")}, + "es_ES": {"name_en": u("Spanish (Spain)"), "name": u("Español (España)")}, + "es_LA": {"name_en": u("Spanish"), "name": u("Español")}, + "et_EE": {"name_en": u("Estonian"), "name": u("Eesti")}, + "eu_ES": {"name_en": u("Basque"), "name": u("Euskara")}, + "fa_IR": {"name_en": u("Persian"), "name": u("فارسی")}, + "fi_FI": {"name_en": u("Finnish"), "name": u("Suomi")}, + "fr_CA": {"name_en": u("French (Canada)"), "name": u("Français (Canada)")}, + "fr_FR": {"name_en": u("French"), "name": u("Français")}, + "ga_IE": {"name_en": u("Irish"), "name": u("Gaeilge")}, + "gl_ES": {"name_en": u("Galician"), "name": u("Galego")}, + "he_IL": {"name_en": u("Hebrew"), "name": u("עברית")}, + "hi_IN": {"name_en": u("Hindi"), "name": u("हिन्दी")}, + "hr_HR": {"name_en": u("Croatian"), "name": u("Hrvatski")}, + "hu_HU": {"name_en": u("Hungarian"), "name": u("Magyar")}, + "id_ID": {"name_en": u("Indonesian"), "name": u("Bahasa Indonesia")}, + "is_IS": {"name_en": u("Icelandic"), "name": u("Íslenska")}, + "it_IT": {"name_en": u("Italian"), "name": u("Italiano")}, + "ja_JP": {"name_en": u("Japanese"), "name": u("日本語")}, + "ko_KR": {"name_en": u("Korean"), "name": u("한국어")}, + "lt_LT": {"name_en": u("Lithuanian"), "name": u("Lietuvių")}, + "lv_LV": {"name_en": u("Latvian"), "name": u("Latviešu")}, + "mk_MK": {"name_en": u("Macedonian"), "name": u("Македонски")}, + "ml_IN": {"name_en": u("Malayalam"), "name": u("മലയാളം")}, + "ms_MY": {"name_en": u("Malay"), "name": u("Bahasa Melayu")}, + "nb_NO": {"name_en": u("Norwegian (bokmal)"), "name": u("Norsk (bokmål)")}, + "nl_NL": {"name_en": u("Dutch"), "name": u("Nederlands")}, + "nn_NO": {"name_en": u("Norwegian (nynorsk)"), "name": u("Norsk (nynorsk)")}, + "pa_IN": {"name_en": u("Punjabi"), "name": u("ਪੰਜਾਬੀ")}, + "pl_PL": {"name_en": u("Polish"), "name": u("Polski")}, + "pt_BR": {"name_en": u("Portuguese (Brazil)"), "name": u("Português (Brasil)")}, + "pt_PT": {"name_en": u("Portuguese (Portugal)"), "name": u("Português (Portugal)")}, + "ro_RO": {"name_en": u("Romanian"), "name": u("Română")}, + "ru_RU": {"name_en": u("Russian"), "name": u("Русский")}, + "sk_SK": {"name_en": u("Slovak"), "name": u("Slovenčina")}, + "sl_SI": {"name_en": u("Slovenian"), "name": u("Slovenščina")}, + "sq_AL": {"name_en": u("Albanian"), "name": u("Shqip")}, + "sr_RS": {"name_en": u("Serbian"), "name": u("Српски")}, + "sv_SE": {"name_en": u("Swedish"), "name": u("Svenska")}, + "sw_KE": {"name_en": u("Swahili"), "name": u("Kiswahili")}, + "ta_IN": {"name_en": u("Tamil"), "name": u("தமிழ்")}, + "te_IN": {"name_en": u("Telugu"), "name": u("తెలుగు")}, + "th_TH": {"name_en": u("Thai"), "name": 
u("ภาษาไทย")}, + "tl_PH": {"name_en": u("Filipino"), "name": u("Filipino")}, + "tr_TR": {"name_en": u("Turkish"), "name": u("Türkçe")}, + "uk_UA": {"name_en": u("Ukraini "), "name": u("Українська")}, + "vi_VN": {"name_en": u("Vietnamese"), "name": u("Tiếng Việt")}, + "zh_CN": {"name_en": u("Chinese (Simplified)"), "name": u("中文(简体)")}, + "zh_TW": {"name_en": u("Chinese (Traditional)"), "name": u("中文(繁體)")}, +} diff --git a/lib/tornado/auth.py b/lib/tornado/auth.py index 800b10afe4..ff7172aa01 100644 --- a/lib/tornado/auth.py +++ b/lib/tornado/auth.py @@ -75,7 +75,7 @@ def get(self): import time import uuid -from tornado.concurrent import TracebackFuture, return_future +from tornado.concurrent import TracebackFuture, return_future, chain_future from tornado import gen from tornado import httpclient from tornado import escape @@ -621,6 +621,72 @@ def _oauth_request_token_url(self, redirect_uri=None, client_id=None, args.update(extra_params) return url_concat(url, args) + @_auth_return_future + def oauth2_request(self, url, callback, access_token=None, + post_args=None, **args): + """Fetches the given URL auth an OAuth2 access token. + + If the request is a POST, ``post_args`` should be provided. Query + string arguments should be given as keyword arguments. + + Example usage: + + ..testcode:: + + class MainHandler(tornado.web.RequestHandler, + tornado.auth.FacebookGraphMixin): + @tornado.web.authenticated + @tornado.gen.coroutine + def get(self): + new_entry = yield self.oauth2_request( + "https://graph.facebook.com/me/feed", + post_args={"message": "I am posting from my Tornado application!"}, + access_token=self.current_user["access_token"]) + + if not new_entry: + # Call failed; perhaps missing permission? + yield self.authorize_redirect() + return + self.finish("Posted a message!") + + .. testoutput:: + :hide: + + .. versionadded:: 4.3 + """ + all_args = {} + if access_token: + all_args["access_token"] = access_token + all_args.update(args) + + if all_args: + url += "?" + urllib_parse.urlencode(all_args) + callback = functools.partial(self._on_oauth2_request, callback) + http = self.get_auth_http_client() + if post_args is not None: + http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args), + callback=callback) + else: + http.fetch(url, callback=callback) + + def _on_oauth2_request(self, future, response): + if response.error: + future.set_exception(AuthError("Error response %s fetching %s" % + (response.error, response.request.url))) + return + + future.set_result(escape.json_decode(response.body)) + + def get_auth_http_client(self): + """Returns the `.AsyncHTTPClient` instance to be used for auth requests. + + May be overridden by subclasses to use an HTTP client other than + the default. + + .. versionadded:: 4.3 + """ + return httpclient.AsyncHTTPClient() + class TwitterMixin(OAuthMixin): """Twitter OAuth authentication. @@ -791,12 +857,21 @@ class GoogleOAuth2Mixin(OAuth2Mixin): """ _OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/auth" _OAUTH_ACCESS_TOKEN_URL = "https://accounts.google.com/o/oauth2/token" + _OAUTH_USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo" _OAUTH_NO_CALLBACKS = False _OAUTH_SETTINGS_KEY = 'google_oauth' @_auth_return_future def get_authenticated_user(self, redirect_uri, code, callback): - """Handles the login for the Google user, returning a user object. + """Handles the login for the Google user, returning an access token. 
+ + The result is a dictionary containing an ``access_token`` field + ([among others](https://developers.google.com/identity/protocols/OAuth2WebServer#handlingtheresponse)). + Unlike other ``get_authenticated_user`` methods in this package, + this method does not return any additional information about the user. + The returned access token can be used with `OAuth2Mixin.oauth2_request` + to request additional information (perhaps from + ``https://www.googleapis.com/oauth2/v2/userinfo``) Example usage: @@ -807,10 +882,14 @@ class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, @tornado.gen.coroutine def get(self): if self.get_argument('code', False): - user = yield self.get_authenticated_user( + access = yield self.get_authenticated_user( redirect_uri='http://your.site.com/auth/google', code=self.get_argument('code')) - # Save the user with e.g. set_secure_cookie + user = yield self.oauth2_request( + "https://www.googleapis.com/oauth2/v1/userinfo", + access_token=access["access_token"]) + # Save the user and access token with + # e.g. set_secure_cookie. else: yield self.authorize_redirect( redirect_uri='http://your.site.com/auth/google', @@ -845,14 +924,6 @@ def _on_access_token(self, future, response): args = escape.json_decode(response.body) future.set_result(args) - def get_auth_http_client(self): - """Returns the `.AsyncHTTPClient` instance to be used for auth requests. - - May be overridden by subclasses to use an HTTP client other than - the default. - """ - return httpclient.AsyncHTTPClient() - class FacebookGraphMixin(OAuth2Mixin): """Facebook authentication using the new Graph API and OAuth2.""" @@ -914,7 +985,7 @@ def _on_access_token(self, redirect_uri, client_id, client_secret, future.set_exception(AuthError('Facebook auth error: %s' % str(response))) return - args = escape.parse_qs_bytes(escape.native_str(response.body)) + args = urlparse.parse_qs(escape.native_str(response.body)) session = { "access_token": args["access_token"][-1], "expires": args.get("expires") @@ -983,40 +1054,21 @@ def get(self): The given path is relative to ``self._FACEBOOK_BASE_URL``, by default "https://graph.facebook.com". + This method is a wrapper around `OAuth2Mixin.oauth2_request`; + the only difference is that this method takes a relative path, + while ``oauth2_request`` takes a complete url. + .. versionchanged:: 3.1 Added the ability to override ``self._FACEBOOK_BASE_URL``. """ url = self._FACEBOOK_BASE_URL + path - all_args = {} - if access_token: - all_args["access_token"] = access_token - all_args.update(args) - - if all_args: - url += "?" + urllib_parse.urlencode(all_args) - callback = functools.partial(self._on_facebook_request, callback) - http = self.get_auth_http_client() - if post_args is not None: - http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args), - callback=callback) - else: - http.fetch(url, callback=callback) - - def _on_facebook_request(self, future, response): - if response.error: - future.set_exception(AuthError("Error response %s fetching %s" % - (response.error, response.request.url))) - return - - future.set_result(escape.json_decode(response.body)) - - def get_auth_http_client(self): - """Returns the `.AsyncHTTPClient` instance to be used for auth requests. - - May be overridden by subclasses to use an HTTP client other than - the default. - """ - return httpclient.AsyncHTTPClient() + # Thanks to the _auth_return_future decorator, our "callback" + # argument is a Future, which we cannot pass as a callback to + # oauth2_request. 
Instead, have oauth2_request return a + # future and chain them together. + oauth_future = self.oauth2_request(url, access_token=access_token, + post_args=post_args, **args) + chain_future(oauth_future, callback) def _oauth_signature(consumer_token, method, url, parameters={}, token=None): diff --git a/lib/tornado/autoreload.py b/lib/tornado/autoreload.py index a52ddde40d..1cbf26c6cb 100644 --- a/lib/tornado/autoreload.py +++ b/lib/tornado/autoreload.py @@ -289,11 +289,16 @@ def main(): runpy.run_module(module, run_name="__main__", alter_sys=True) elif mode == "script": with open(script) as f: + # Execute the script in our namespace instead of creating + # a new one so that something that tries to import __main__ + # (e.g. the unittest module) will see names defined in the + # script instead of just those defined in this module. global __file__ __file__ = script - # Use globals as our "locals" dictionary so that - # something that tries to import __main__ (e.g. the unittest - # module) will see the right things. + # If __package__ is defined, imports may be incorrectly + # interpreted as relative to this module. + global __package__ + del __package__ exec_in(f.read(), globals(), globals()) except SystemExit as e: logging.basicConfig() diff --git a/lib/tornado/concurrent.py b/lib/tornado/concurrent.py index 479ca022ef..5f8cdc4141 100644 --- a/lib/tornado/concurrent.py +++ b/lib/tornado/concurrent.py @@ -16,16 +16,16 @@ """Utilities for working with threads and ``Futures``. ``Futures`` are a pattern for concurrent programming introduced in -Python 3.2 in the `concurrent.futures` package (this package has also -been backported to older versions of Python and can be installed with -``pip install futures``). Tornado will use `concurrent.futures.Future` if -it is available; otherwise it will use a compatible class defined in this -module. +Python 3.2 in the `concurrent.futures` package. This package defines +a mostly-compatible `Future` class designed for use from coroutines, +as well as some utility functions for interacting with the +`concurrent.futures` package. """ from __future__ import absolute_import, division, print_function, with_statement import functools import platform +import textwrap import traceback import sys @@ -170,6 +170,23 @@ def __init__(self): self._callbacks = [] + # Implement the Python 3.5 Awaitable protocol if possible + # (we can't use return and yield together until py33). + if sys.version_info >= (3, 3): + exec(textwrap.dedent(""" + def __await__(self): + return (yield self) + """)) + else: + # Py2-compatible version for use with cython. + def __await__(self): + result = yield self + # StopIteration doesn't take args before py33, + # but Cython recognizes the args tuple. + e = StopIteration() + e.args = (result,) + raise e + def cancel(self): """Cancel the operation, if possible. 
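The ``__await__`` implementations added to ``Future`` above (together with the ``convert_yielded`` changes later in this patch) are what allow Tornado futures, and therefore ``@gen.coroutine`` functions, to be awaited from Python 3.5 native coroutines. A short sketch of the intended usage (Python 3.5+ only, illustrative; it assumes the rest of this Tornado 4.3 upgrade is applied):

    from tornado import gen
    from tornado.ioloop import IOLoop

    @gen.coroutine
    def fetch_value():
        # an ordinary yield-based coroutine returning a Future
        raise gen.Return(42)

    async def main():
        # works because the Future returned by fetch_value() now implements __await__
        value = await fetch_value()
        return value + 1

    # run_sync() converts the awaitable returned by main() into a Future and runs it
    print(IOLoop.current().run_sync(main))  # 43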
@@ -365,6 +382,7 @@ def foo(self): def run_on_executor_decorator(fn): executor = kwargs.get("executor", "executor") io_loop = kwargs.get("io_loop", "io_loop") + @functools.wraps(fn) def wrapper(self, *args, **kwargs): callback = kwargs.pop("callback", None) diff --git a/lib/tornado/curl_httpclient.py b/lib/tornado/curl_httpclient.py index ae6f114a95..22f2502322 100644 --- a/lib/tornado/curl_httpclient.py +++ b/lib/tornado/curl_httpclient.py @@ -387,17 +387,28 @@ def write_function(chunk): else: raise KeyError('unknown method ' + request.method) - # Handle curl's cryptic options for every individual HTTP method - if request.method == "GET": - if request.body is not None: - raise ValueError('Body must be None for GET request') - elif request.method in ("POST", "PUT") or request.body: - if request.body is None: + body_expected = request.method in ("POST", "PATCH", "PUT") + body_present = request.body is not None + if not request.allow_nonstandard_methods: + # Some HTTP methods nearly always have bodies while others + # almost never do. Fail in this case unless the user has + # opted out of sanity checks with allow_nonstandard_methods. + if ((body_expected and not body_present) or + (body_present and not body_expected)): raise ValueError( - 'Body must not be None for "%s" request' - % request.method) - - request_buffer = BytesIO(utf8(request.body)) + 'Body must %sbe None for method %s (unless ' + 'allow_nonstandard_methods is true)' % + ('not ' if body_expected else '', request.method)) + + if body_expected or body_present: + if request.method == "GET": + # Even with `allow_nonstandard_methods` we disallow + # GET with a body (because libcurl doesn't allow it + # unless we use CUSTOMREQUEST). While the spec doesn't + # forbid clients from sending a body, it arguably + # disallows the server from doing anything with them. + raise ValueError('Body must be None for GET request') + request_buffer = BytesIO(utf8(request.body or '')) def ioctl(cmd): if cmd == curl.IOCMD_RESTARTREAD: @@ -405,10 +416,10 @@ def ioctl(cmd): curl.setopt(pycurl.READFUNCTION, request_buffer.read) curl.setopt(pycurl.IOCTLFUNCTION, ioctl) if request.method == "POST": - curl.setopt(pycurl.POSTFIELDSIZE, len(request.body)) + curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or '')) else: curl.setopt(pycurl.UPLOAD, True) - curl.setopt(pycurl.INFILESIZE, len(request.body)) + curl.setopt(pycurl.INFILESIZE, len(request.body or '')) if request.auth_username is not None: userpwd = "%s:%s" % (request.auth_username, request.auth_password or '') @@ -454,7 +465,8 @@ def _curl_header_callback(self, headers, header_callback, header_line): if header_callback is not None: self.io_loop.add_callback(header_callback, header_line) # header_line as returned by curl includes the end-of-line characters. 
- header_line = header_line.strip() + # whitespace at the start should be preserved to allow multi-line headers + header_line = header_line.rstrip() if header_line.startswith("HTTP/"): headers.clear() try: diff --git a/lib/tornado/gen.py b/lib/tornado/gen.py index 9145768951..bf184e5484 100644 --- a/lib/tornado/gen.py +++ b/lib/tornado/gen.py @@ -79,9 +79,10 @@ def get(self): import collections import functools import itertools +import os import sys +import textwrap import types -import weakref from tornado.concurrent import Future, TracebackFuture, is_future, chain_future from tornado.ioloop import IOLoop @@ -90,12 +91,43 @@ def get(self): from tornado.util import raise_exc_info try: - from functools import singledispatch # py34+ -except ImportError as e: try: + from functools import singledispatch # py34+ + except ImportError: from singledispatch import singledispatch # backport +except ImportError: + # In most cases, singledispatch is required (to avoid + # difficult-to-diagnose problems in which the functionality + # available differs depending on which invisble packages are + # installed). However, in Google App Engine third-party + # dependencies are more trouble so we allow this module to be + # imported without it. + if 'APPENGINE_RUNTIME' not in os.environ: + raise + singledispatch = None + +try: + try: + from collections.abc import Generator as GeneratorType # py35+ except ImportError: - singledispatch = None + from backports_abc import Generator as GeneratorType + + try: + from inspect import isawaitable # py35+ + except ImportError: + from backports_abc import isawaitable +except ImportError: + if 'APPENGINE_RUNTIME' not in os.environ: + raise + from types import GeneratorType + + def isawaitable(x): + return False + +try: + import builtins # py3 +except ImportError: + import __builtin__ as builtins class KeyReuseError(Exception): @@ -122,6 +154,21 @@ class TimeoutError(Exception): """Exception raised by ``with_timeout``.""" +def _value_from_stopiteration(e): + try: + # StopIteration has a value attribute beginning in py33. + # So does our Return class. + return e.value + except AttributeError: + pass + try: + # Cython backports coroutine functionality by putting the value in + # e.args[0]. + return e.args[0] + except (AttributeError, IndexError): + return None + + def engine(func): """Callback-oriented decorator for asynchronous generators. @@ -202,6 +249,11 @@ def _make_coroutine_wrapper(func, replace_callback): argument, so we cannot simply implement ``@engine`` in terms of ``@coroutine``. """ + # On Python 3.5, set the coroutine flag on our generator, to allow it + # to be used with 'await'. + if hasattr(types, 'coroutine'): + func = types.coroutine(func) + @functools.wraps(func) def wrapper(*args, **kwargs): future = TracebackFuture() @@ -214,12 +266,12 @@ def wrapper(*args, **kwargs): try: result = func(*args, **kwargs) except (Return, StopIteration) as e: - result = getattr(e, 'value', None) + result = _value_from_stopiteration(e) except Exception: future.set_exc_info(sys.exc_info()) return future else: - if isinstance(result, types.GeneratorType): + if isinstance(result, GeneratorType): # Inline the first iteration of Runner.run. 
This lets us # avoid the cost of creating a Runner when the coroutine # never actually yields, which in turn allows us to @@ -235,7 +287,7 @@ def wrapper(*args, **kwargs): 'stack_context inconsistency (probably caused ' 'by yield within a "with StackContext" block)')) except (StopIteration, Return) as e: - future.set_result(getattr(e, 'value', None)) + future.set_result(_value_from_stopiteration(e)) except Exception: future.set_exc_info(sys.exc_info()) else: @@ -280,6 +332,8 @@ def fetch_json(url): def __init__(self, value=None): super(Return, self).__init__() self.value = value + # Cython recognizes subclasses of StopIteration with a .args tuple. + self.args = (value,) class WaitIterator(object): @@ -318,7 +372,22 @@ class WaitIterator(object): arguments were used in the construction of the `WaitIterator`, ``current_index`` will use the corresponding keyword). + On Python 3.5, `WaitIterator` implements the async iterator + protocol, so it can be used with the ``async for`` statement (note + that in this version the entire iteration is aborted if any value + raises an exception, while the previous example can continue past + individual errors):: + + async for result in gen.WaitIterator(future1, future2): + print("Result {} received from {} at {}".format( + result, wait_iterator.current_future, + wait_iterator.current_index)) + .. versionadded:: 4.1 + + .. versionchanged:: 4.3 + Added ``async for`` support in Python 3.5. + """ def __init__(self, *args, **kwargs): if args and kwargs: @@ -375,6 +444,16 @@ def _return_result(self, done): self.current_future = done self.current_index = self._unfinished.pop(done) + @coroutine + def __aiter__(self): + raise Return(self) + + def __anext__(self): + if self.done(): + # Lookup by name to silence pyflakes on older versions. + raise getattr(builtins, 'StopAsyncIteration')() + return self.next() + class YieldPoint(object): """Base class for objects that may be yielded from the generator. @@ -539,27 +618,91 @@ def get_result(self): return self.result_fn() -class Multi(YieldPoint): +def _contains_yieldpoint(children): + """Returns True if ``children`` contains any YieldPoints. + + ``children`` may be a dict or a list, as used by `MultiYieldPoint` + and `multi_future`. + """ + if isinstance(children, dict): + return any(isinstance(i, YieldPoint) for i in children.values()) + if isinstance(children, list): + return any(isinstance(i, YieldPoint) for i in children) + return False + + +def multi(children, quiet_exceptions=()): """Runs multiple asynchronous operations in parallel. - Takes a list of ``YieldPoints`` or ``Futures`` and returns a list of - their responses. It is not necessary to call `Multi` explicitly, - since the engine will do so automatically when the generator yields - a list of ``YieldPoints`` or a mixture of ``YieldPoints`` and ``Futures``. + ``children`` may either be a list or a dict whose values are + yieldable objects. ``multi()`` returns a new yieldable + object that resolves to a parallel structure containing their + results. If ``children`` is a list, the result is a list of + results in the same order; if it is a dict, the result is a dict + with the same keys. + + That is, ``results = yield multi(list_of_futures)`` is equivalent + to:: + + results = [] + for future in list_of_futures: + results.append(yield future) + + If any children raise exceptions, ``multi()`` will raise the first + one. All others will be logged, unless they are of types + contained in the ``quiet_exceptions`` argument. 
- Instead of a list, the argument may also be a dictionary whose values are - Futures, in which case a parallel dictionary is returned mapping the same - keys to their results. + If any of the inputs are `YieldPoints `, the returned + yieldable object is a `YieldPoint`. Otherwise, returns a `.Future`. + This means that the result of `multi` can be used in a native + coroutine if and only if all of its children can be. - It is not normally necessary to call this class directly, as it - will be created automatically as needed. However, calling it directly - allows you to use the ``quiet_exceptions`` argument to control - the logging of multiple exceptions. + In a ``yield``-based coroutine, it is not normally necessary to + call this function directly, since the coroutine runner will + do it automatically when a list or dict is yielded. However, + it is necessary in ``await``-based coroutines, or to pass + the ``quiet_exceptions`` argument. + + This function is available under the names ``multi()`` and ``Multi()`` + for historical reasons. + + .. versionchanged:: 4.2 + If multiple yieldables fail, any exceptions after the first + (which is raised) will be logged. Added the ``quiet_exceptions`` + argument to suppress this logging for selected exception types. + + .. versionchanged:: 4.3 + Replaced the class ``Multi`` and the function ``multi_future`` + with a unified function ``multi``. Added support for yieldables + other than `YieldPoint` and `.Future`. + + """ + if _contains_yieldpoint(children): + return MultiYieldPoint(children, quiet_exceptions=quiet_exceptions) + else: + return multi_future(children, quiet_exceptions=quiet_exceptions) + +Multi = multi + + +class MultiYieldPoint(YieldPoint): + """Runs multiple asynchronous operations in parallel. + + This class is similar to `multi`, but it always creates a stack + context even when no children require it. It is not compatible with + native coroutines. .. versionchanged:: 4.2 If multiple ``YieldPoints`` fail, any exceptions after the first (which is raised) will be logged. Added the ``quiet_exceptions`` argument to suppress this logging for selected exception types. + + .. versionchanged:: 4.3 + Renamed from ``Multi`` to ``MultiYieldPoint``. The name ``Multi`` + remains as an alias for the equivalent `multi` function. + + .. deprecated:: 4.3 + Use `multi` instead. """ def __init__(self, children, quiet_exceptions=()): self.keys = None @@ -568,6 +711,8 @@ def __init__(self, children, quiet_exceptions=()): children = children.values() self.children = [] for i in children: + if not isinstance(i, YieldPoint): + i = convert_yielded(i) if is_future(i): i = YieldFuture(i) self.children.append(i) @@ -609,24 +754,8 @@ def get_result(self): def multi_future(children, quiet_exceptions=()): """Wait for multiple asynchronous futures in parallel. - Takes a list of ``Futures`` (but *not* other ``YieldPoints``) and returns - a new Future that resolves when all the other Futures are done. - If all the ``Futures`` succeeded, the returned Future's result is a list - of their results. If any failed, the returned Future raises the exception - of the first one to fail. - - Instead of a list, the argument may also be a dictionary whose values are - Futures, in which case a parallel dictionary is returned mapping the same - keys to their results. - - It is not normally necessary to call `multi_future` explcitly, - since the engine will do so automatically when the generator - yields a list of ``Futures``. 
However, calling it directly - allows you to use the ``quiet_exceptions`` argument to control - the logging of multiple exceptions. - - This function is faster than the `Multi` `YieldPoint` because it - does not require the creation of a stack context. + This function is similar to `multi`, but does not support + `YieldPoints `. .. versionadded:: 4.0 @@ -634,12 +763,16 @@ def multi_future(children, quiet_exceptions=()): If multiple ``Futures`` fail, any exceptions after the first (which is raised) will be logged. Added the ``quiet_exceptions`` argument to suppress this logging for selected exception types. + + .. deprecated:: 4.3 + Use `multi` instead. """ if isinstance(children, dict): keys = list(children.keys()) children = children.values() else: keys = None + children = list(map(convert_yielded, children)) assert all(is_future(i) for i in children) unfinished_children = set(children) @@ -682,6 +815,11 @@ def maybe_future(x): it is wrapped in a new `.Future`. This is suitable for use as ``result = yield gen.maybe_future(f())`` when you don't know whether ``f()`` returns a `.Future` or not. + + .. deprecated:: 4.3 + This function only handles ``Futures``, not other yieldable objects. + Instead of `maybe_future`, check for the non-future result types + you expect (often just ``None``), and ``yield`` anything unknown. """ if is_future(x): return x @@ -894,7 +1032,7 @@ def run(self): raise LeakedCallbackError( "finished without waiting for callbacks %r" % self.pending_callbacks) - self.result_future.set_result(getattr(e, 'value', None)) + self.result_future.set_result(_value_from_stopiteration(e)) self.result_future = None self._deactivate_stack_context() return @@ -912,13 +1050,9 @@ def run(self): def handle_yield(self, yielded): # Lists containing YieldPoints require stack contexts; - # other lists are handled via multi_future in convert_yielded. - if (isinstance(yielded, list) and - any(isinstance(f, YieldPoint) for f in yielded)): - yielded = Multi(yielded) - elif (isinstance(yielded, dict) and - any(isinstance(f, YieldPoint) for f in yielded.values())): - yielded = Multi(yielded) + # other lists are handled in convert_yielded. + if _contains_yieldpoint(yielded): + yielded = multi(yielded) if isinstance(yielded, YieldPoint): # YieldPoints are too closely coupled to the Runner to go @@ -1001,6 +1135,67 @@ def wrapper(*args, **kwargs): callback(None) return wrapper +# Convert Awaitables into Futures. It is unfortunately possible +# to have infinite recursion here if those Awaitables assume that +# we're using a different coroutine runner and yield objects +# we don't understand. If that happens, the solution is to +# register that runner's yieldable objects with convert_yielded. +if sys.version_info >= (3, 3): + exec(textwrap.dedent(""" + @coroutine + def _wrap_awaitable(x): + if hasattr(x, '__await__'): + x = x.__await__() + return (yield from x) + """)) +else: + # Py2-compatible version for use with Cython. + # Copied from PEP 380. 
+ @coroutine + def _wrap_awaitable(x): + if hasattr(x, '__await__'): + _i = x.__await__() + else: + _i = iter(x) + try: + _y = next(_i) + except StopIteration as _e: + _r = _value_from_stopiteration(_e) + else: + while 1: + try: + _s = yield _y + except GeneratorExit as _e: + try: + _m = _i.close + except AttributeError: + pass + else: + _m() + raise _e + except BaseException as _e: + _x = sys.exc_info() + try: + _m = _i.throw + except AttributeError: + raise _e + else: + try: + _y = _m(*_x) + except StopIteration as _e: + _r = _value_from_stopiteration(_e) + break + else: + try: + if _s is None: + _y = next(_i) + else: + _y = _i.send(_s) + except StopIteration as _e: + _r = _value_from_stopiteration(_e) + break + raise Return(_r) + def convert_yielded(yielded): """Convert a yielded object into a `.Future`. @@ -1016,14 +1211,31 @@ def _(asyncio_future): .. versionadded:: 4.1 """ - # Lists and dicts containing YieldPoints were handled separately - # via Multi(). + # Lists and dicts containing YieldPoints were handled earlier. if isinstance(yielded, (list, dict)): - return multi_future(yielded) + return multi(yielded) elif is_future(yielded): return yielded + elif isawaitable(yielded): + return _wrap_awaitable(yielded) else: raise BadYieldError("yielded unknown object %r" % (yielded,)) if singledispatch is not None: convert_yielded = singledispatch(convert_yielded) + + try: + # If we can import t.p.asyncio, do it for its side effect + # (registering asyncio.Future with convert_yielded). + # It's ugly to do this here, but it prevents a cryptic + # infinite recursion in _wrap_awaitable. + # Note that even with this, asyncio integration is unlikely + # to work unless the application also configures AsyncIOLoop, + # but at least the error messages in that case are more + # comprehensible than a stack overflow. + import tornado.platform.asyncio + except ImportError: + pass + else: + # Reference the imported module to make pyflakes happy. + tornado diff --git a/lib/tornado/http1connection.py b/lib/tornado/http1connection.py index 6226ef7af2..1c577063b0 100644 --- a/lib/tornado/http1connection.py +++ b/lib/tornado/http1connection.py @@ -515,6 +515,12 @@ def _parse_headers(self, data): def _read_body(self, code, headers, delegate): if "Content-Length" in headers: + if "Transfer-Encoding" in headers: + # Response cannot contain both Content-Length and + # Transfer-Encoding headers. + # http://tools.ietf.org/html/rfc7230#section-3.3.3 + raise httputil.HTTPInputError( + "Response with both Transfer-Encoding and Content-Length") if "," in headers["Content-Length"]: # Proxies sometimes cause Content-Length headers to get # duplicated. 
If all the values are identical then we can @@ -558,7 +564,9 @@ def _read_fixed_body(self, content_length, delegate): content_length -= len(body) if not self._write_finished or self.is_client: with _ExceptionLoggingContext(app_log): - yield gen.maybe_future(delegate.data_received(body)) + ret = delegate.data_received(body) + if ret is not None: + yield ret @gen.coroutine def _read_chunked_body(self, delegate): @@ -579,7 +587,9 @@ def _read_chunked_body(self, delegate): bytes_to_read -= len(chunk) if not self._write_finished or self.is_client: with _ExceptionLoggingContext(app_log): - yield gen.maybe_future(delegate.data_received(chunk)) + ret = delegate.data_received(chunk) + if ret is not None: + yield ret # chunk ends with \r\n crlf = yield self.stream.read_bytes(2) assert crlf == b"\r\n" @@ -619,11 +629,14 @@ def data_received(self, chunk): decompressed = self._decompressor.decompress( compressed_data, self._chunk_size) if decompressed: - yield gen.maybe_future( - self._delegate.data_received(decompressed)) + ret = self._delegate.data_received(decompressed) + if ret is not None: + yield ret compressed_data = self._decompressor.unconsumed_tail else: - yield gen.maybe_future(self._delegate.data_received(chunk)) + ret = self._delegate.data_received(chunk) + if ret is not None: + yield ret def finish(self): if self._decompressor is not None: diff --git a/lib/tornado/httpclient.py b/lib/tornado/httpclient.py index c2e6862361..9179227b1e 100644 --- a/lib/tornado/httpclient.py +++ b/lib/tornado/httpclient.py @@ -603,9 +603,12 @@ class HTTPError(Exception): """ def __init__(self, code, message=None, response=None): self.code = code - message = message or httputil.responses.get(code, "Unknown") + self.message = message or httputil.responses.get(code, "Unknown") self.response = response - Exception.__init__(self, "HTTP %d: %s" % (self.code, message)) + super(HTTPError, self).__init__(code, message, response) + + def __str__(self): + return "HTTP %d: %s" % (self.code, self.message) class _RequestProxy(object): diff --git a/lib/tornado/httpserver.py b/lib/tornado/httpserver.py index 2dd04dd7a8..ff235fe46b 100644 --- a/lib/tornado/httpserver.py +++ b/lib/tornado/httpserver.py @@ -188,7 +188,6 @@ def on_close(self, server_conn): class _HTTPRequestContext(object): def __init__(self, stream, address, protocol): self.address = address - self.protocol = protocol # Save the socket's address family now so we know how to # interpret self.address even after the stream is closed # and its socket attribute replaced with None. diff --git a/lib/tornado/httputil.py b/lib/tornado/httputil.py index fa5e697c17..471df54f96 100644 --- a/lib/tornado/httputil.py +++ b/lib/tornado/httputil.py @@ -98,7 +98,7 @@ def __missing__(self, key): _normalized_headers = _NormalizedHeaderCache(1000) -class HTTPHeaders(dict): +class HTTPHeaders(collections.MutableMapping): """A dictionary that maintains ``Http-Header-Case`` for all keys. 
Supports multiple values per key via a pair of new methods, @@ -127,9 +127,7 @@ class HTTPHeaders(dict): Set-Cookie: C=D """ def __init__(self, *args, **kwargs): - # Don't pass args or kwargs to dict.__init__, as it will bypass - # our __setitem__ - dict.__init__(self) + self._dict = {} self._as_list = {} self._last_key = None if (len(args) == 1 and len(kwargs) == 0 and @@ -148,10 +146,8 @@ def add(self, name, value): norm_name = _normalized_headers[name] self._last_key = norm_name if norm_name in self: - # bypass our override of __setitem__ since it modifies _as_list - dict.__setitem__(self, norm_name, - native_str(self[norm_name]) + ',' + - native_str(value)) + self._dict[norm_name] = (native_str(self[norm_name]) + ',' + + native_str(value)) self._as_list[norm_name].append(value) else: self[norm_name] = value @@ -183,8 +179,7 @@ def parse_line(self, line): # continuation of a multi-line header new_part = ' ' + line.lstrip() self._as_list[self._last_key][-1] += new_part - dict.__setitem__(self, self._last_key, - self[self._last_key] + new_part) + self._dict[self._last_key] += new_part else: name, value = line.split(":", 1) self.add(name, value.strip()) @@ -203,45 +198,36 @@ def parse(cls, headers): h.parse_line(line) return h - # dict implementation overrides + # MutableMapping abstract method implementations. def __setitem__(self, name, value): norm_name = _normalized_headers[name] - dict.__setitem__(self, norm_name, value) + self._dict[norm_name] = value self._as_list[norm_name] = [value] def __getitem__(self, name): - return dict.__getitem__(self, _normalized_headers[name]) + return self._dict[_normalized_headers[name]] def __delitem__(self, name): norm_name = _normalized_headers[name] - dict.__delitem__(self, norm_name) + del self._dict[norm_name] del self._as_list[norm_name] - def __contains__(self, name): - norm_name = _normalized_headers[name] - return dict.__contains__(self, norm_name) - - def get(self, name, default=None): - return dict.get(self, _normalized_headers[name], default) + def __len__(self): + return len(self._dict) - def update(self, *args, **kwargs): - # dict.update bypasses our __setitem__ - for k, v in dict(*args, **kwargs).items(): - self[k] = v + def __iter__(self): + return iter(self._dict) def copy(self): - # default implementation returns dict(self), not the subclass + # defined in dict but not in MutableMapping. return HTTPHeaders(self) # Use our overridden copy method for the copy.copy module. + # This makes shallow copies one level deeper, but preserves + # the appearance that HTTPHeaders is a single container. __copy__ = copy - def __deepcopy__(self, memo_dict): - # Our values are immutable strings, so our standard copy is - # effectively a deep copy. - return self.copy() - class HTTPServerRequest(object): """A single HTTP request. diff --git a/lib/tornado/ioloop.py b/lib/tornado/ioloop.py index 67e33b521f..c23cb33e48 100644 --- a/lib/tornado/ioloop.py +++ b/lib/tornado/ioloop.py @@ -249,7 +249,7 @@ def initialize(self, make_current=None): if IOLoop.current(instance=False) is None: self.make_current() elif make_current: - if IOLoop.current(instance=False) is None: + if IOLoop.current(instance=False) is not None: raise RuntimeError("current IOLoop already exists") self.make_current() @@ -400,10 +400,12 @@ def stop(self): def run_sync(self, func, timeout=None): """Starts the `IOLoop`, runs the given function, and stops the loop. - If the function returns a `.Future`, the `IOLoop` will run - until the future is resolved. 
If it raises an exception, the - `IOLoop` will stop and the exception will be re-raised to the - caller. + The function must return either a yieldable object or + ``None``. If the function returns a yieldable object, the + `IOLoop` will run until the yieldable is resolved (and + `run_sync()` will return the yieldable's result). If it raises + an exception, the `IOLoop` will stop and the exception will be + re-raised to the caller. The keyword-only argument ``timeout`` may be used to set a maximum duration for the function. If the timeout expires, @@ -418,12 +420,18 @@ def main(): if __name__ == '__main__': IOLoop.current().run_sync(main) + + .. versionchanged:: 4.3 + Returning a non-``None``, non-yieldable value is now an error. """ future_cell = [None] def run(): try: result = func() + if result is not None: + from tornado.gen import convert_yielded + result = convert_yielded(result) except Exception: future_cell[0] = TracebackFuture() future_cell[0].set_exc_info(sys.exc_info()) @@ -590,12 +598,21 @@ def _run_callback(self, callback): """ try: ret = callback() - if ret is not None and is_future(ret): + if ret is not None: + from tornado import gen # Functions that return Futures typically swallow all # exceptions and store them in the Future. If a Future # makes it out to the IOLoop, ensure its exception (if any) # gets logged too. - self.add_future(ret, lambda f: f.result()) + try: + ret = gen.convert_yielded(ret) + except gen.BadYieldError: + # It's not unusual for add_callback to be used with + # methods returning a non-None and non-yieldable + # result, which should just be ignored. + pass + else: + self.add_future(ret, lambda f: f.result()) except Exception: self.handle_callback_exception(callback) @@ -909,38 +926,40 @@ def remove_timeout(self, timeout): self._cancellations += 1 def add_callback(self, callback, *args, **kwargs): - with self._callback_lock: + if thread.get_ident() != self._thread_ident: + # If we're not on the IOLoop's thread, we need to synchronize + # with other threads, or waking logic will induce a race. + with self._callback_lock: + if self._closing: + return + list_empty = not self._callbacks + self._callbacks.append(functools.partial( + stack_context.wrap(callback), *args, **kwargs)) + if list_empty: + # If we're not in the IOLoop's thread, and we added the + # first callback to an empty list, we may need to wake it + # up (it may wake up on its own, but an occasional extra + # wake is harmless). Waking up a polling IOLoop is + # relatively expensive, so we try to avoid it when we can. + self._waker.wake() + else: if self._closing: - raise RuntimeError("IOLoop is closing") - list_empty = not self._callbacks + return + # If we're on the IOLoop's thread, we don't need the lock, + # since we don't need to wake anyone, just add the + # callback. Blindly insert into self._callbacks. This is + # safe even from signal handlers because the GIL makes + # list.append atomic. One subtlety is that if the signal + # is interrupting another thread holding the + # _callback_lock block in IOLoop.start, we may modify + # either the old or new version of self._callbacks, but + # either way will work. self._callbacks.append(functools.partial( stack_context.wrap(callback), *args, **kwargs)) - if list_empty and thread.get_ident() != self._thread_ident: - # If we're in the IOLoop's thread, we know it's not currently - # polling. 
If we're not, and we added the first callback to an - # empty list, we may need to wake it up (it may wake up on its - # own, but an occasional extra wake is harmless). Waking - # up a polling IOLoop is relatively expensive, so we try to - # avoid it when we can. - self._waker.wake() def add_callback_from_signal(self, callback, *args, **kwargs): with stack_context.NullContext(): - if thread.get_ident() != self._thread_ident: - # if the signal is handled on another thread, we can add - # it normally (modulo the NullContext) - self.add_callback(callback, *args, **kwargs) - else: - # If we're on the IOLoop's thread, we cannot use - # the regular add_callback because it may deadlock on - # _callback_lock. Blindly insert into self._callbacks. - # This is safe because the GIL makes list.append atomic. - # One subtlety is that if the signal interrupted the - # _callback_lock block in IOLoop.start, we may modify - # either the old or new version of self._callbacks, - # but either way will work. - self._callbacks.append(functools.partial( - stack_context.wrap(callback), *args, **kwargs)) + self.add_callback(callback, *args, **kwargs) class _Timeout(object): diff --git a/lib/tornado/iostream.py b/lib/tornado/iostream.py index 3a175a6796..4e304f8900 100644 --- a/lib/tornado/iostream.py +++ b/lib/tornado/iostream.py @@ -89,8 +89,16 @@ class StreamClosedError(IOError): Note that the close callback is scheduled to run *after* other callbacks on the stream (to allow for buffered data to be processed), so you may see this error before you see the close callback. + + The ``real_error`` attribute contains the underlying error that caused + the stream to close (if any). + + .. versionchanged:: 4.3 + Added the ``real_error`` attribute. """ - pass + def __init__(self, real_error=None): + super(StreamClosedError, self).__init__('Stream is closed') + self.real_error = real_error class UnsatisfiableReadError(Exception): @@ -344,7 +352,8 @@ def read_until_close(self, callback=None, streaming_callback=None): try: self._try_inline_read() except: - future.add_done_callback(lambda f: f.exception()) + if future is not None: + future.add_done_callback(lambda f: f.exception()) raise return future @@ -446,13 +455,7 @@ def _maybe_run_close_callback(self): futures.append(self._ssl_connect_future) self._ssl_connect_future = None for future in futures: - if self._is_connreset(self.error): - # Treat connection resets as closed connections so - # clients only have to catch one kind of exception - # to avoid logging. - future.set_exception(StreamClosedError()) - else: - future.set_exception(self.error or StreamClosedError()) + future.set_exception(StreamClosedError(real_error=self.error)) if self._close_callback is not None: cb = self._close_callback self._close_callback = None @@ -644,8 +647,8 @@ def _handle_read(self): pos = self._read_to_buffer_loop() except UnsatisfiableReadError: raise - except Exception: - gen_log.warning("error on read", exc_info=True) + except Exception as e: + gen_log.warning("error on read: %s" % e) self.close(exc_info=True) return if pos is not None: @@ -722,18 +725,22 @@ def _read_to_buffer(self): to read (i.e. the read returns EWOULDBLOCK or equivalent). On error closes the socket and raises an exception. 
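Pending read and connect futures now fail with ``StreamClosedError(real_error=self.error)``, so callers can tell a clean close from a failed one without reaching into ``stream.error`` themselves. A minimal sketch of that pattern, assuming an already-connected ``IOStream``; the ``fetch_line`` coroutine and its argument are illustrative, not part of the patch::

    from tornado import gen
    from tornado.iostream import StreamClosedError

    @gen.coroutine
    def fetch_line(stream):
        try:
            line = yield stream.read_until(b"\n")
        except StreamClosedError as e:
            # real_error holds the underlying socket/SSL exception, or None
            # when the stream was closed without an error.
            if e.real_error is not None:
                raise e.real_error
            line = None
        raise gen.Return(line)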
""" - try: - chunk = self.read_from_fd() - except (socket.error, IOError, OSError) as e: - # ssl.SSLError is a subclass of socket.error - if self._is_connreset(e): - # Treat ECONNRESET as a connection close rather than - # an error to minimize log spam (the exception will - # be available on self.error for apps that care). + while True: + try: + chunk = self.read_from_fd() + except (socket.error, IOError, OSError) as e: + if errno_from_exception(e) == errno.EINTR: + continue + # ssl.SSLError is a subclass of socket.error + if self._is_connreset(e): + # Treat ECONNRESET as a connection close rather than + # an error to minimize log spam (the exception will + # be available on self.error for apps that care). + self.close(exc_info=True) + return self.close(exc_info=True) - return - self.close(exc_info=True) - raise + raise + break if chunk is None: return 0 self._read_buffer.append(chunk) @@ -875,7 +882,7 @@ def _consume(self, loc): def _check_closed(self): if self.closed(): - raise StreamClosedError("Stream is closed") + raise StreamClosedError(real_error=self.error) def _maybe_add_error_listener(self): # This method is part of an optimization: to detect a connection that @@ -1148,6 +1155,15 @@ def start_tls(self, server_side, ssl_options=None, server_hostname=None): def close_callback(): if not future.done(): + # Note that unlike most Futures returned by IOStream, + # this one passes the underlying error through directly + # instead of wrapping everything in a StreamClosedError + # with a real_error attribute. This is because once the + # connection is established it's more helpful to raise + # the SSLError directly than to hide it behind a + # StreamClosedError (and the client is expecting SSL + # issues rather than network issues since this method is + # named start_tls). future.set_exception(ssl_stream.error or StreamClosedError()) if orig_close_callback is not None: orig_close_callback() @@ -1262,10 +1278,11 @@ def _do_ssl_handshake(self): raise except socket.error as err: # Some port scans (e.g. nmap in -sT mode) have been known - # to cause do_handshake to raise EBADF, so make that error - # quiet as well. + # to cause do_handshake to raise EBADF and ENOTCONN, so make + # those errors quiet as well. 
# https://groups.google.com/forum/?fromgroups#!topic/python-tornado/ApucKJat1_0 - if self._is_connreset(err) or err.args[0] == errno.EBADF: + if (self._is_connreset(err) or + err.args[0] in (errno.EBADF, errno.ENOTCONN)): return self.close(exc_info=True) raise except AttributeError: @@ -1311,8 +1328,8 @@ def _verify_cert(self, peercert): return False try: ssl_match_hostname(peercert, self._server_hostname) - except SSLCertificateError: - gen_log.warning("Invalid SSL certificate", exc_info=True) + except SSLCertificateError as e: + gen_log.warning("Invalid SSL certificate: %s" % e) return False else: return True diff --git a/lib/tornado/locale.py b/lib/tornado/locale.py index a668765bbc..8310c4d4c6 100644 --- a/lib/tornado/locale.py +++ b/lib/tornado/locale.py @@ -41,8 +41,10 @@ from __future__ import absolute_import, division, print_function, with_statement +import codecs import csv import datetime +from io import BytesIO import numbers import os import re @@ -51,6 +53,8 @@ from tornado.log import gen_log from tornado.util import u +from tornado._locale_data import LOCALE_NAMES + _default_locale = "en_US" _translations = {} _supported_locales = frozenset([_default_locale]) @@ -86,7 +90,7 @@ def set_default_locale(code): _supported_locales = frozenset(list(_translations.keys()) + [_default_locale]) -def load_translations(directory): +def load_translations(directory, encoding=None): """Loads translations from CSV files in a directory. Translations are strings with optional Python-style named placeholders @@ -106,12 +110,20 @@ def load_translations(directory): The file is read using the `csv` module in the default "excel" dialect. In this format there should not be spaces after the commas. + If no ``encoding`` parameter is given, the encoding will be + detected automatically (among UTF-8 and UTF-16) if the file + contains a byte-order marker (BOM), defaulting to UTF-8 if no BOM + is present. + Example translation ``es_LA.csv``:: "I love you","Te amo" "%(name)s liked this","A %(name)s les gustó esto","plural" "%(name)s liked this","A %(name)s le gustó esto","singular" + .. versionchanged:: 4.3 + Added ``encoding`` parameter. Added support for BOM-based encoding + detection, UTF-16, and UTF-8-with-BOM. """ global _translations global _supported_locales @@ -125,13 +137,29 @@ def load_translations(directory): os.path.join(directory, path)) continue full_path = os.path.join(directory, path) + if encoding is None: + # Try to autodetect encoding based on the BOM. + with open(full_path, 'rb') as f: + data = f.read(len(codecs.BOM_UTF16_LE)) + if data in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): + encoding = 'utf-16' + else: + # utf-8-sig is "utf-8 with optional BOM". It's discouraged + # in most cases but is common with CSV files because Excel + # cannot read utf-8 files without a BOM. + encoding = 'utf-8-sig' try: # python 3: csv.reader requires a file open in text mode. # Force utf8 to avoid dependence on $LANG environment variable. - f = open(full_path, "r", encoding="utf-8") + f = open(full_path, "r", encoding=encoding) except TypeError: - # python 2: files return byte strings, which are decoded below. - f = open(full_path, "r") + # python 2: csv can only handle byte strings (in ascii-compatible + # encodings), which we decode below. Transcode everything into + # utf8 before passing it to csv.reader. 
+ f = BytesIO() + with codecs.open(full_path, "r", encoding=encoding) as infile: + f.write(escape.utf8(infile.read())) + f.seek(0) _translations[locale] = {} for i, row in enumerate(csv.reader(f)): if not row or len(row) < 2: @@ -477,8 +505,8 @@ def pgettext(self, context, message, plural_message=None, count=None): if plural_message is not None: assert count is not None msgs_with_ctxt = ("%s%s%s" % (context, CONTEXT_SEPARATOR, message), - "%s%s%s" % (context, CONTEXT_SEPARATOR, plural_message), - count) + "%s%s%s" % (context, CONTEXT_SEPARATOR, plural_message), + count) result = self.ngettext(*msgs_with_ctxt) if CONTEXT_SEPARATOR in result: # Translation not found @@ -491,68 +519,3 @@ def pgettext(self, context, message, plural_message=None, count=None): # Translation not found result = message return result - -LOCALE_NAMES = { - "af_ZA": {"name_en": u("Afrikaans"), "name": u("Afrikaans")}, - "am_ET": {"name_en": u("Amharic"), "name": u('\u12a0\u121b\u122d\u129b')}, - "ar_AR": {"name_en": u("Arabic"), "name": u("\u0627\u0644\u0639\u0631\u0628\u064a\u0629")}, - "bg_BG": {"name_en": u("Bulgarian"), "name": u("\u0411\u044a\u043b\u0433\u0430\u0440\u0441\u043a\u0438")}, - "bn_IN": {"name_en": u("Bengali"), "name": u("\u09ac\u09be\u0982\u09b2\u09be")}, - "bs_BA": {"name_en": u("Bosnian"), "name": u("Bosanski")}, - "ca_ES": {"name_en": u("Catalan"), "name": u("Catal\xe0")}, - "cs_CZ": {"name_en": u("Czech"), "name": u("\u010ce\u0161tina")}, - "cy_GB": {"name_en": u("Welsh"), "name": u("Cymraeg")}, - "da_DK": {"name_en": u("Danish"), "name": u("Dansk")}, - "de_DE": {"name_en": u("German"), "name": u("Deutsch")}, - "el_GR": {"name_en": u("Greek"), "name": u("\u0395\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac")}, - "en_GB": {"name_en": u("English (UK)"), "name": u("English (UK)")}, - "en_US": {"name_en": u("English (US)"), "name": u("English (US)")}, - "es_ES": {"name_en": u("Spanish (Spain)"), "name": u("Espa\xf1ol (Espa\xf1a)")}, - "es_LA": {"name_en": u("Spanish"), "name": u("Espa\xf1ol")}, - "et_EE": {"name_en": u("Estonian"), "name": u("Eesti")}, - "eu_ES": {"name_en": u("Basque"), "name": u("Euskara")}, - "fa_IR": {"name_en": u("Persian"), "name": u("\u0641\u0627\u0631\u0633\u06cc")}, - "fi_FI": {"name_en": u("Finnish"), "name": u("Suomi")}, - "fr_CA": {"name_en": u("French (Canada)"), "name": u("Fran\xe7ais (Canada)")}, - "fr_FR": {"name_en": u("French"), "name": u("Fran\xe7ais")}, - "ga_IE": {"name_en": u("Irish"), "name": u("Gaeilge")}, - "gl_ES": {"name_en": u("Galician"), "name": u("Galego")}, - "he_IL": {"name_en": u("Hebrew"), "name": u("\u05e2\u05d1\u05e8\u05d9\u05ea")}, - "hi_IN": {"name_en": u("Hindi"), "name": u("\u0939\u093f\u0928\u094d\u0926\u0940")}, - "hr_HR": {"name_en": u("Croatian"), "name": u("Hrvatski")}, - "hu_HU": {"name_en": u("Hungarian"), "name": u("Magyar")}, - "id_ID": {"name_en": u("Indonesian"), "name": u("Bahasa Indonesia")}, - "is_IS": {"name_en": u("Icelandic"), "name": u("\xcdslenska")}, - "it_IT": {"name_en": u("Italian"), "name": u("Italiano")}, - "ja_JP": {"name_en": u("Japanese"), "name": u("\u65e5\u672c\u8a9e")}, - "ko_KR": {"name_en": u("Korean"), "name": u("\ud55c\uad6d\uc5b4")}, - "lt_LT": {"name_en": u("Lithuanian"), "name": u("Lietuvi\u0173")}, - "lv_LV": {"name_en": u("Latvian"), "name": u("Latvie\u0161u")}, - "mk_MK": {"name_en": u("Macedonian"), "name": u("\u041c\u0430\u043a\u0435\u0434\u043e\u043d\u0441\u043a\u0438")}, - "ml_IN": {"name_en": u("Malayalam"), "name": u("\u0d2e\u0d32\u0d2f\u0d3e\u0d33\u0d02")}, - "ms_MY": {"name_en": u("Malay"), 
"name": u("Bahasa Melayu")}, - "nb_NO": {"name_en": u("Norwegian (bokmal)"), "name": u("Norsk (bokm\xe5l)")}, - "nl_NL": {"name_en": u("Dutch"), "name": u("Nederlands")}, - "nn_NO": {"name_en": u("Norwegian (nynorsk)"), "name": u("Norsk (nynorsk)")}, - "pa_IN": {"name_en": u("Punjabi"), "name": u("\u0a2a\u0a70\u0a1c\u0a3e\u0a2c\u0a40")}, - "pl_PL": {"name_en": u("Polish"), "name": u("Polski")}, - "pt_BR": {"name_en": u("Portuguese (Brazil)"), "name": u("Portugu\xeas (Brasil)")}, - "pt_PT": {"name_en": u("Portuguese (Portugal)"), "name": u("Portugu\xeas (Portugal)")}, - "ro_RO": {"name_en": u("Romanian"), "name": u("Rom\xe2n\u0103")}, - "ru_RU": {"name_en": u("Russian"), "name": u("\u0420\u0443\u0441\u0441\u043a\u0438\u0439")}, - "sk_SK": {"name_en": u("Slovak"), "name": u("Sloven\u010dina")}, - "sl_SI": {"name_en": u("Slovenian"), "name": u("Sloven\u0161\u010dina")}, - "sq_AL": {"name_en": u("Albanian"), "name": u("Shqip")}, - "sr_RS": {"name_en": u("Serbian"), "name": u("\u0421\u0440\u043f\u0441\u043a\u0438")}, - "sv_SE": {"name_en": u("Swedish"), "name": u("Svenska")}, - "sw_KE": {"name_en": u("Swahili"), "name": u("Kiswahili")}, - "ta_IN": {"name_en": u("Tamil"), "name": u("\u0ba4\u0bae\u0bbf\u0bb4\u0bcd")}, - "te_IN": {"name_en": u("Telugu"), "name": u("\u0c24\u0c46\u0c32\u0c41\u0c17\u0c41")}, - "th_TH": {"name_en": u("Thai"), "name": u("\u0e20\u0e32\u0e29\u0e32\u0e44\u0e17\u0e22")}, - "tl_PH": {"name_en": u("Filipino"), "name": u("Filipino")}, - "tr_TR": {"name_en": u("Turkish"), "name": u("T\xfcrk\xe7e")}, - "uk_UA": {"name_en": u("Ukraini "), "name": u("\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430")}, - "vi_VN": {"name_en": u("Vietnamese"), "name": u("Ti\u1ebfng Vi\u1ec7t")}, - "zh_CN": {"name_en": u("Chinese (Simplified)"), "name": u("\u4e2d\u6587(\u7b80\u4f53)")}, - "zh_TW": {"name_en": u("Chinese (Traditional)"), "name": u("\u4e2d\u6587(\u7e41\u9ad4)")}, -} diff --git a/lib/tornado/locks.py b/lib/tornado/locks.py index 4b0bdb38f1..a181772861 100644 --- a/lib/tornado/locks.py +++ b/lib/tornado/locks.py @@ -12,13 +12,6 @@ # License for the specific language governing permissions and limitations # under the License. -""" -.. testsetup:: * - - from tornado import ioloop, gen, locks - io_loop = ioloop.IOLoop.current() -""" - from __future__ import absolute_import, division, print_function, with_statement __all__ = ['Condition', 'Event', 'Semaphore', 'BoundedSemaphore', 'Lock'] @@ -61,7 +54,11 @@ class Condition(_TimeoutGarbageCollector): .. testcode:: - condition = locks.Condition() + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.locks import Condition + + condition = Condition() @gen.coroutine def waiter(): @@ -80,7 +77,7 @@ def runner(): # Yield two Futures; wait for waiter() and notifier() to finish. yield [waiter(), notifier()] - io_loop.run_sync(runner) + IOLoop.current().run_sync(runner) .. testoutput:: @@ -92,7 +89,7 @@ def runner(): `wait` takes an optional ``timeout`` argument, which is either an absolute timestamp:: - io_loop = ioloop.IOLoop.current() + io_loop = IOLoop.current() # Wait up to 1 second for a notification. yield condition.wait(timeout=io_loop.time() + 1) @@ -161,7 +158,11 @@ class Event(object): .. testcode:: - event = locks.Event() + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.locks import Event + + event = Event() @gen.coroutine def waiter(): @@ -180,7 +181,7 @@ def setter(): def runner(): yield [waiter(), setter()] - io_loop.run_sync(runner) + IOLoop.current().run_sync(runner) .. 
testoutput:: @@ -210,7 +211,7 @@ def set(self): def clear(self): """Reset the internal flag to ``False``. - + Calls to `.wait` will block until `.set` is called. """ if self._future.done(): @@ -261,7 +262,8 @@ class Semaphore(_TimeoutGarbageCollector): from collections import deque - from tornado import gen, ioloop + from tornado import gen + from tornado.ioloop import IOLoop from tornado.concurrent import Future # Ensure reliable doctest output: resolve Futures one at a time. @@ -273,14 +275,18 @@ def simulator(futures): yield gen.moment f.set_result(None) - ioloop.IOLoop.current().add_callback(simulator, list(futures_q)) + IOLoop.current().add_callback(simulator, list(futures_q)) def use_some_resource(): return futures_q.popleft() .. testcode:: semaphore - sem = locks.Semaphore(2) + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.locks import Semaphore + + sem = Semaphore(2) @gen.coroutine def worker(worker_id): @@ -297,7 +303,7 @@ def runner(): # Join all workers. yield [worker(i) for i in range(3)] - io_loop.run_sync(runner) + IOLoop.current().run_sync(runner) .. testoutput:: semaphore @@ -321,6 +327,20 @@ def worker(worker_id): # Now the semaphore has been released. print("Worker %d is done" % worker_id) + + In Python 3.5, the semaphore itself can be used as an async context + manager:: + + async def worker(worker_id): + async with sem: + print("Worker %d is working" % worker_id) + await use_some_resource() + + # Now the semaphore has been released. + print("Worker %d is done" % worker_id) + + .. versionchanged:: 4.3 + Added ``async with`` support in Python 3.5. """ def __init__(self, value=1): super(Semaphore, self).__init__() @@ -383,6 +403,14 @@ def __enter__(self): __exit__ = __enter__ + @gen.coroutine + def __aenter__(self): + yield self.acquire() + + @gen.coroutine + def __aexit__(self, typ, value, tb): + self.release() + class BoundedSemaphore(Semaphore): """A semaphore that prevents release() being called too many times. @@ -412,7 +440,7 @@ class Lock(object): Releasing an unlocked lock raises `RuntimeError`. - `acquire` supports the context manager protocol: + `acquire` supports the context manager protocol in all Python versions: >>> from tornado import gen, locks >>> lock = locks.Lock() @@ -424,6 +452,22 @@ class Lock(object): ... pass ... ... # Now the lock is released. + + In Python 3.5, `Lock` also supports the async context manager + protocol. Note that in this case there is no `acquire`, because + ``async with`` includes both the ``yield`` and the ``acquire`` + (just as it does with `threading.Lock`): + + >>> async def f(): # doctest: +SKIP + ... async with lock: + ... # Do something holding the lock. + ... pass + ... + ... # Now the lock is released. + + .. versionchanged:: 3.5 + Added ``async with`` support in Python 3.5. 
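Besides the new ``async with`` form, the plain coroutine API remains available and is what to reach for when acquisition should give up after a deadline. A small sketch under that assumption; the one-second deadline is chosen only for illustration::

    from tornado import gen
    from tornado.ioloop import IOLoop
    from tornado.locks import Lock

    lock = Lock()

    @gen.coroutine
    def critical_section():
        # acquire() accepts an absolute deadline (or a timedelta) and raises
        # gen.TimeoutError if the lock is not obtained in time.
        yield lock.acquire(timeout=IOLoop.current().time() + 1)
        try:
            pass  # work while holding the lock
        finally:
            lock.release()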
+ """ def __init__(self): self._block = BoundedSemaphore(value=1) @@ -458,3 +502,11 @@ def __enter__(self): "Use Lock like 'with (yield lock)', not like 'with lock'") __exit__ = __enter__ + + @gen.coroutine + def __aenter__(self): + yield self.acquire() + + @gen.coroutine + def __aexit__(self, typ, value, tb): + self.release() diff --git a/lib/tornado/log.py b/lib/tornado/log.py index c68dec46ba..040889a989 100644 --- a/lib/tornado/log.py +++ b/lib/tornado/log.py @@ -190,10 +190,22 @@ def enable_pretty_logging(options=None, logger=None): logger = logging.getLogger() logger.setLevel(getattr(logging, options.logging.upper())) if options.log_file_prefix: - channel = logging.handlers.RotatingFileHandler( - filename=options.log_file_prefix, - maxBytes=options.log_file_max_size, - backupCount=options.log_file_num_backups) + rotate_mode = options.log_rotate_mode + if rotate_mode == 'size': + channel = logging.handlers.RotatingFileHandler( + filename=options.log_file_prefix, + maxBytes=options.log_file_max_size, + backupCount=options.log_file_num_backups) + elif rotate_mode == 'time': + channel = logging.handlers.TimedRotatingFileHandler( + filename=options.log_file_prefix, + when=options.log_rotate_when, + interval=options.log_rotate_interval, + backupCount=options.log_file_num_backups) + else: + error_message = 'The value of log_rotate_mode option should be ' +\ + '"size" or "time", not "%s".' % rotate_mode + raise ValueError(error_message) channel.setFormatter(LogFormatter(color=False)) logger.addHandler(channel) @@ -235,4 +247,13 @@ def define_logging_options(options=None): options.define("log_file_num_backups", type=int, default=10, help="number of log files to keep") + options.define("log_rotate_when", type=str, default='midnight', + help=("specify the type of TimedRotatingFileHandler interval " + "other options:('S', 'M', 'H', 'D', 'W0'-'W6')")) + options.define("log_rotate_interval", type=int, default=1, + help="The interval value of timed rotating") + + options.define("log_rotate_mode", type=str, default='size', + help="The mode of rotating files(time or size)") + options.add_parse_callback(lambda: enable_pretty_logging(options)) diff --git a/lib/tornado/netutil.py b/lib/tornado/netutil.py index 9aa292c417..4fc8d04d9c 100644 --- a/lib/tornado/netutil.py +++ b/lib/tornado/netutil.py @@ -111,7 +111,7 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, - backlog=_DEFAULT_BACKLOG, flags=None): + backlog=_DEFAULT_BACKLOG, flags=None, reuse_port=False): """Creates listening sockets bound to the given port and address. Returns a list of socket objects (multiple sockets are returned if @@ -130,7 +130,14 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, ``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like ``socket.AI_PASSIVE | socket.AI_NUMERICHOST``. + + ``resuse_port`` option sets ``SO_REUSEPORT`` option for every socket + in the list. If your platform doesn't support this option ValueError will + be raised. 
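With the new ``reuse_port`` flag, several server processes can bind the same port where the platform supports ``SO_REUSEPORT``. A minimal sketch that guards the call as the docstring suggests; the port and address are illustrative::

    import socket

    from tornado.httpserver import HTTPServer
    from tornado.netutil import bind_sockets
    from tornado.web import Application

    # bind_sockets raises ValueError if reuse_port is requested on a
    # platform without SO_REUSEPORT, so only ask for it when available.
    reuse = hasattr(socket, "SO_REUSEPORT")
    sockets = bind_sockets(8888, address="127.0.0.1", reuse_port=reuse)
    HTTPServer(Application()).add_sockets(sockets)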
""" + if reuse_port and not hasattr(socket, "SO_REUSEPORT"): + raise ValueError("the platform doesn't support SO_REUSEPORT") + sockets = [] if address == "": address = None @@ -165,6 +172,8 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, set_close_exec(sock.fileno()) if os.name != 'nt': sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + if reuse_port: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) if af == socket.AF_INET6: # On linux, ipv6 sockets accept ipv4 too by default, # but this makes it impossible to bind to both diff --git a/lib/tornado/options.py b/lib/tornado/options.py index 89a9e43265..ba16b1a7fd 100644 --- a/lib/tornado/options.py +++ b/lib/tornado/options.py @@ -68,6 +68,12 @@ def connect(): from tornado.options import options, parse_command_line options.logging = None parse_command_line() + +.. versionchanged:: 4.3 + Dashes and underscores are fully interchangeable in option names; + options can be defined, set, and read with any mix of the two. + Dashes are typical for command-line usage while config files require + underscores. """ from __future__ import absolute_import, division, print_function, with_statement @@ -103,28 +109,38 @@ def __init__(self): self.define("help", type=bool, help="show this help information", callback=self._help_callback) + def _normalize_name(self, name): + return name.replace('_', '-') + def __getattr__(self, name): + name = self._normalize_name(name) if isinstance(self._options.get(name), _Option): return self._options[name].value() raise AttributeError("Unrecognized option %r" % name) def __setattr__(self, name, value): + name = self._normalize_name(name) if isinstance(self._options.get(name), _Option): return self._options[name].set(value) raise AttributeError("Unrecognized option %r" % name) def __iter__(self): - return iter(self._options) + return (opt.name for opt in self._options.values()) + + def __contains__(self, name): + name = self._normalize_name(name) + return name in self._options - def __getitem__(self, item): - return self._options[item].value() + def __getitem__(self, name): + name = self._normalize_name(name) + return self._options[name].value() def items(self): """A sequence of (name, value) pairs. .. versionadded:: 3.1 """ - return [(name, opt.value()) for name, opt in self._options.items()] + return [(opt.name, opt.value()) for name, opt in self._options.items()] def groups(self): """The set of option-groups created by ``define``. @@ -151,7 +167,7 @@ def group_dict(self, group): .. versionadded:: 3.1 """ return dict( - (name, opt.value()) for name, opt in self._options.items() + (opt.name, opt.value()) for name, opt in self._options.items() if not group or group == opt.group_name) def as_dict(self): @@ -160,7 +176,7 @@ def as_dict(self): .. 
versionadded:: 3.1 """ return dict( - (name, opt.value()) for name, opt in self._options.items()) + (opt.name, opt.value()) for name, opt in self._options.items()) def define(self, name, default=None, type=None, help=None, metavar=None, multiple=False, group=None, callback=None): @@ -223,11 +239,13 @@ def define(self, name, default=None, type=None, help=None, metavar=None, group_name = group else: group_name = file_name - self._options[name] = _Option(name, file_name=file_name, - default=default, type=type, help=help, - metavar=metavar, multiple=multiple, - group_name=group_name, - callback=callback) + normalized = self._normalize_name(name) + option = _Option(name, file_name=file_name, + default=default, type=type, help=help, + metavar=metavar, multiple=multiple, + group_name=group_name, + callback=callback) + self._options[normalized] = option def parse_command_line(self, args=None, final=True): """Parses all options given on the command line (defaults to @@ -255,7 +273,7 @@ def parse_command_line(self, args=None, final=True): break arg = args[i].lstrip("-") name, equals, value = arg.partition("=") - name = name.replace('-', '_') + name = self._normalize_name(name) if name not in self._options: self.print_help() raise Error('Unrecognized command line option: %r' % name) @@ -287,8 +305,9 @@ def parse_config_file(self, path, final=True): with open(path, 'rb') as f: exec_in(native_str(f.read()), config, config) for name in config: - if name in self._options: - self._options[name].set(config[name]) + normalized = self._normalize_name(name) + if normalized in self._options: + self._options[normalized].set(config[name]) if final: self.run_parse_callbacks() @@ -308,7 +327,8 @@ def print_help(self, file=None): print("\n%s options:\n" % os.path.normpath(filename), file=file) o.sort(key=lambda option: option.name) for option in o: - prefix = option.name + # Always print names with dashes in a CLI context. + prefix = self._normalize_name(option.name) if option.metavar: prefix += "=" + option.metavar description = option.help or "" @@ -467,19 +487,17 @@ def _parse_datetime(self, value): pass raise Error('Unrecognized date/time format: %r' % value) - _TIMEDELTA_ABBREVS = [ - ('hours', ['h']), - ('minutes', ['m', 'min']), - ('seconds', ['s', 'sec']), - ('milliseconds', ['ms']), - ('microseconds', ['us']), - ('days', ['d']), - ('weeks', ['w']), - ] - - _TIMEDELTA_ABBREV_DICT = dict( - (abbrev, full) for full, abbrevs in _TIMEDELTA_ABBREVS - for abbrev in abbrevs) + _TIMEDELTA_ABBREV_DICT = { + 'h': 'hours', + 'm': 'minutes', + 'min': 'minutes', + 's': 'seconds', + 'sec': 'seconds', + 'ms': 'milliseconds', + 'us': 'microseconds', + 'd': 'days', + 'w': 'weeks', + } _FLOAT_PATTERN = r'[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?' diff --git a/lib/tornado/platform/asyncio.py b/lib/tornado/platform/asyncio.py index 8f3dbff640..bf0428ec51 100644 --- a/lib/tornado/platform/asyncio.py +++ b/lib/tornado/platform/asyncio.py @@ -1,12 +1,22 @@ """Bridges between the `asyncio` module and Tornado IOLoop. -This is a work in progress and interfaces are subject to change. +.. 
versionadded:: 3.2 -To test: -python3.4 -m tornado.test.runtests --ioloop=tornado.platform.asyncio.AsyncIOLoop -python3.4 -m tornado.test.runtests --ioloop=tornado.platform.asyncio.AsyncIOMainLoop -(the tests log a few warnings with AsyncIOMainLoop because they leave some -unfinished callbacks on the event loop that fail when it resumes) +This module integrates Tornado with the ``asyncio`` module introduced +in Python 3.4 (and available `as a separate download +`_ for Python 3.3). This makes +it possible to combine the two libraries on the same event loop. + +Most applications should use `AsyncIOMainLoop` to run Tornado on the +default ``asyncio`` event loop. Applications that need to run event +loops on multiple threads may use `AsyncIOLoop` to create multiple +loops. + +.. note:: + + Tornado requires the `~asyncio.BaseEventLoop.add_reader` family of methods, + so it is not compatible with the `~asyncio.ProactorEventLoop` on Windows. + Use the `~asyncio.SelectorEventLoop` instead. """ from __future__ import absolute_import, division, print_function, with_statement @@ -35,7 +45,6 @@ def initialize(self, asyncio_loop, close_loop=False, **kwargs): super(BaseAsyncIOLoop, self).initialize(**kwargs) self.asyncio_loop = asyncio_loop self.close_loop = close_loop - self.asyncio_loop.call_soon(self.make_current) # Maps fd to (fileobj, handler function) pair (as in IOLoop.add_handler) self.handlers = {} # Set of fds listening for reads/writes @@ -105,8 +114,16 @@ def _handle_events(self, fd, events): handler_func(fileobj, events) def start(self): - self._setup_logging() - self.asyncio_loop.run_forever() + old_current = IOLoop.current(instance=False) + try: + self._setup_logging() + self.make_current() + self.asyncio_loop.run_forever() + finally: + if old_current is None: + IOLoop.clear_current() + else: + old_current.make_current() def stop(self): self.asyncio_loop.stop() @@ -133,26 +150,64 @@ def add_callback(self, callback, *args, **kwargs): class AsyncIOMainLoop(BaseAsyncIOLoop): + """``AsyncIOMainLoop`` creates an `.IOLoop` that corresponds to the + current ``asyncio`` event loop (i.e. the one returned by + ``asyncio.get_event_loop()``). Recommended usage:: + + from tornado.platform.asyncio import AsyncIOMainLoop + import asyncio + AsyncIOMainLoop().install() + asyncio.get_event_loop().run_forever() + """ def initialize(self, **kwargs): super(AsyncIOMainLoop, self).initialize(asyncio.get_event_loop(), close_loop=False, **kwargs) class AsyncIOLoop(BaseAsyncIOLoop): + """``AsyncIOLoop`` is an `.IOLoop` that runs on an ``asyncio`` event loop. + This class follows the usual Tornado semantics for creating new + ``IOLoops``; these loops are not necessarily related to the + ``asyncio`` default event loop. Recommended usage:: + + from tornado.ioloop import IOLoop + IOLoop.configure('tornado.platform.asyncio.AsyncIOLoop') + IOLoop.current().start() + + Each ``AsyncIOLoop`` creates a new ``asyncio.EventLoop``; this object + can be accessed with the ``asyncio_loop`` attribute. + """ def initialize(self, **kwargs): - super(AsyncIOLoop, self).initialize(asyncio.new_event_loop(), - close_loop=True, **kwargs) + loop = asyncio.new_event_loop() + try: + super(AsyncIOLoop, self).initialize(loop, close_loop=True, **kwargs) + except Exception: + # If initialize() does not succeed (taking ownership of the loop), + # we have to close it. 
+ loop.close() + raise def to_tornado_future(asyncio_future): - """Convert an ``asyncio.Future`` to a `tornado.concurrent.Future`.""" + """Convert an `asyncio.Future` to a `tornado.concurrent.Future`. + + .. versionadded:: 4.1 + """ tf = tornado.concurrent.Future() tornado.concurrent.chain_future(asyncio_future, tf) return tf def to_asyncio_future(tornado_future): - """Convert a `tornado.concurrent.Future` to an ``asyncio.Future``.""" + """Convert a Tornado yieldable object to an `asyncio.Future`. + + .. versionadded:: 4.1 + + .. versionchanged:: 4.3 + Now accepts any yieldable object, not just + `tornado.concurrent.Future`. + """ + tornado_future = convert_yielded(tornado_future) af = asyncio.Future() tornado.concurrent.chain_future(tornado_future, af) return af diff --git a/lib/tornado/platform/twisted.py b/lib/tornado/platform/twisted.py index 7b3c8ca5e3..d3a4e75d1c 100644 --- a/lib/tornado/platform/twisted.py +++ b/lib/tornado/platform/twisted.py @@ -12,10 +12,6 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. - -# Note: This module's docs are not currently extracted automatically, -# so changes must be made manually to twisted.rst -# TODO: refactor doc build process to use an appropriate virtualenv """Bridges between the Twisted reactor and Tornado IOLoop. This module lets you run applications and libraries written for @@ -23,45 +19,6 @@ depending on which library's underlying event loop you want to use. This module has been tested with Twisted versions 11.0.0 and newer. - -Twisted on Tornado ------------------- - -`TornadoReactor` implements the Twisted reactor interface on top of -the Tornado IOLoop. To use it, simply call `install` at the beginning -of the application:: - - import tornado.platform.twisted - tornado.platform.twisted.install() - from twisted.internet import reactor - -When the app is ready to start, call `IOLoop.current().start()` -instead of `reactor.run()`. - -It is also possible to create a non-global reactor by calling -`tornado.platform.twisted.TornadoReactor(io_loop)`. However, if -the `IOLoop` and reactor are to be short-lived (such as those used in -unit tests), additional cleanup may be required. Specifically, it is -recommended to call:: - - reactor.fireSystemEvent('shutdown') - reactor.disconnectAll() - -before closing the `IOLoop`. - -Tornado on Twisted ------------------- - -`TwistedIOLoop` implements the Tornado IOLoop interface on top of the Twisted -reactor. Recommended usage:: - - from tornado.platform.twisted import TwistedIOLoop - from twisted.internet import reactor - TwistedIOLoop().install() - # Set up your tornado application as usual using `IOLoop.instance` - reactor.run() - -`TwistedIOLoop` always uses the global Twisted reactor. """ from __future__ import absolute_import, division, print_function, with_statement @@ -144,12 +101,27 @@ def active(self): class TornadoReactor(PosixReactorBase): """Twisted reactor built on the Tornado IOLoop. - Since it is intended to be used in applications where the top-level - event loop is ``io_loop.start()`` rather than ``reactor.run()``, - it is implemented a little differently than other Twisted reactors. - We override `mainLoop` instead of `doIteration` and must implement - timed call functionality on top of `IOLoop.add_timeout` rather than - using the implementation in `PosixReactorBase`. + `TornadoReactor` implements the Twisted reactor interface on top of + the Tornado IOLoop. 
To use it, simply call `install` at the beginning + of the application:: + + import tornado.platform.twisted + tornado.platform.twisted.install() + from twisted.internet import reactor + + When the app is ready to start, call ``IOLoop.current().start()`` + instead of ``reactor.run()``. + + It is also possible to create a non-global reactor by calling + ``tornado.platform.twisted.TornadoReactor(io_loop)``. However, if + the `.IOLoop` and reactor are to be short-lived (such as those used in + unit tests), additional cleanup may be required. Specifically, it is + recommended to call:: + + reactor.fireSystemEvent('shutdown') + reactor.disconnectAll() + + before closing the `.IOLoop`. .. versionchanged:: 4.1 The ``io_loop`` argument is deprecated. @@ -191,7 +163,6 @@ def _removeDelayedCall(self, dc): # IReactorThreads def callFromThread(self, f, *args, **kw): - """See `twisted.internet.interfaces.IReactorThreads.callFromThread`""" assert callable(f), "%s is not callable" % f with NullContext(): # This NullContext is mainly for an edge case when running @@ -237,7 +208,6 @@ def _invoke_callback(self, fd, events): writer.writeConnectionLost(failure.Failure(err)) def addReader(self, reader): - """Add a FileDescriptor for notification of data available to read.""" if reader in self._readers: # Don't add the reader if it's already there return @@ -257,7 +227,6 @@ def addReader(self, reader): IOLoop.READ) def addWriter(self, writer): - """Add a FileDescriptor for notification of data available to write.""" if writer in self._writers: return fd = writer.fileno() @@ -276,7 +245,6 @@ def addWriter(self, writer): IOLoop.WRITE) def removeReader(self, reader): - """Remove a Selectable for notification of data available to read.""" if reader in self._readers: fd = self._readers.pop(reader) (_, writer) = self._fds[fd] @@ -293,7 +261,6 @@ def removeReader(self, reader): self._io_loop.remove_handler(fd) def removeWriter(self, writer): - """Remove a Selectable for notification of data available to write.""" if writer in self._writers: fd = self._writers.pop(writer) (reader, _) = self._fds[fd] @@ -334,6 +301,14 @@ def doIteration(self, delay): raise NotImplementedError("doIteration") def mainLoop(self): + # Since this class is intended to be used in applications + # where the top-level event loop is ``io_loop.start()`` rather + # than ``reactor.run()``, it is implemented a little + # differently than other Twisted reactors. We override + # ``mainLoop`` instead of ``doIteration`` and must implement + # timed call functionality on top of `.IOLoop.add_timeout` + # rather than using the implementation in + # ``PosixReactorBase``. self._io_loop.start() @@ -364,8 +339,17 @@ def listenUDP(self, port, protocol, interface='', maxPacketSize=8192): def install(io_loop=None): """Install this package as the default Twisted reactor. + ``install()`` must be called very early in the startup process, + before most other twisted-related imports. Conversely, because it + initializes the `.IOLoop`, it cannot be called before + `.fork_processes` or multi-process `~.TCPServer.start`. These + conflicting requirements make it difficult to use `.TornadoReactor` + in multi-process mode, and an external process manager such as + ``supervisord`` is recommended instead. + .. versionchanged:: 4.1 The ``io_loop`` argument is deprecated. + """ if not io_loop: io_loop = tornado.ioloop.IOLoop.current() @@ -408,8 +392,17 @@ def logPrefix(self): class TwistedIOLoop(tornado.ioloop.IOLoop): """IOLoop implementation that runs on Twisted. 
+ `TwistedIOLoop` implements the Tornado IOLoop interface on top of + the Twisted reactor. Recommended usage:: + + from tornado.platform.twisted import TwistedIOLoop + from twisted.internet import reactor + TwistedIOLoop().install() + # Set up your tornado application as usual using `IOLoop.instance` + reactor.run() + Uses the global Twisted reactor by default. To create multiple - `TwistedIOLoops` in the same process, you must pass a unique reactor + ``TwistedIOLoops`` in the same process, you must pass a unique reactor when constructing each one. Not compatible with `tornado.process.Subprocess.set_exit_callback` @@ -423,7 +416,6 @@ def initialize(self, reactor=None, **kwargs): reactor = twisted.internet.reactor self.reactor = reactor self.fds = {} - self.reactor.callWhenRunning(self.make_current) def close(self, all_fds=False): fds = self.fds @@ -477,8 +469,16 @@ def remove_handler(self, fd): del self.fds[fd] def start(self): - self._setup_logging() - self.reactor.run() + old_current = IOLoop.current(instance=False) + try: + self._setup_logging() + self.make_current() + self.reactor.run() + finally: + if old_current is None: + IOLoop.clear_current() + else: + old_current.make_current() def stop(self): self.reactor.crash() diff --git a/lib/tornado/process.py b/lib/tornado/process.py index f580e19253..daa9677bb5 100644 --- a/lib/tornado/process.py +++ b/lib/tornado/process.py @@ -50,7 +50,14 @@ # Re-export this exception for convenience. -CalledProcessError = subprocess.CalledProcessError +try: + CalledProcessError = subprocess.CalledProcessError +except AttributeError: + # The subprocess module exists in Google App Engine, but is empty. + # This module isn't very useful in that case, but it should + # at least be importable. + if 'APPENGINE_RUNTIME' not in os.environ: + raise def cpu_count(): diff --git a/lib/tornado/queues.py b/lib/tornado/queues.py index 55ab4834ed..129b204e36 100644 --- a/lib/tornado/queues.py +++ b/lib/tornado/queues.py @@ -44,6 +44,14 @@ def on_timeout(): lambda _: io_loop.remove_timeout(timeout_handle)) +class _QueueIterator(object): + def __init__(self, q): + self.q = q + + def __anext__(self): + return self.q.get() + + class Queue(object): """Coordinate producer and consumer coroutines. @@ -51,7 +59,11 @@ class Queue(object): .. testcode:: - q = queues.Queue(maxsize=2) + from tornado import gen + from tornado.ioloop import IOLoop + from tornado.queues import Queue + + q = Queue(maxsize=2) @gen.coroutine def consumer(): @@ -71,19 +83,20 @@ def producer(): @gen.coroutine def main(): - consumer() # Start consumer. + # Start consumer without waiting (since it never finishes). + IOLoop.current().spawn_callback(consumer) yield producer() # Wait for producer to put all tasks. yield q.join() # Wait for consumer to finish all tasks. print('Done') - io_loop.run_sync(main) + IOLoop.current().run_sync(main) .. testoutput:: Put 0 Put 1 - Put 2 Doing work on 0 + Put 2 Doing work on 1 Put 3 Doing work on 2 @@ -91,6 +104,21 @@ def main(): Doing work on 3 Doing work on 4 Done + + In Python 3.5, `Queue` implements the async iterator protocol, so + ``consumer()`` could be rewritten as:: + + async def consumer(): + async for item in q: + try: + print('Doing work on %s' % item) + yield gen.sleep(0.01) + finally: + q.task_done() + + .. versionchanged:: 4.3 + Added ``async for`` support in Python 3.5. 
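Note that ``__anext__`` above simply returns ``q.get()``, so the ``async for`` form never finishes on its own; a consumer that has to stop usually watches for a sentinel. A sketch for Python 3.5+, with the sentinel convention as an assumption rather than part of the API::

    async def consume(q, sentinel=None):
        # Runs until the producer puts the agreed-upon sentinel value.
        async for item in q:
            try:
                if item is sentinel:
                    return
                print('Doing work on %s' % item)
            finally:
                q.task_done()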
+ """ def __init__(self, maxsize=0): if maxsize is None: @@ -215,6 +243,10 @@ def join(self, timeout=None): """ return self._finished.wait(timeout) + @gen.coroutine + def __aiter__(self): + return _QueueIterator(self) + # These three are overridable in subclasses. def _init(self): self._queue = collections.deque() @@ -266,7 +298,9 @@ class PriorityQueue(Queue): .. testcode:: - q = queues.PriorityQueue() + from tornado.queues import PriorityQueue + + q = PriorityQueue() q.put((1, 'medium-priority item')) q.put((0, 'high-priority item')) q.put((10, 'low-priority item')) @@ -296,7 +330,9 @@ class LifoQueue(Queue): .. testcode:: - q = queues.LifoQueue() + from tornado.queues import LifoQueue + + q = LifoQueue() q.put(3) q.put(2) q.put(1) diff --git a/lib/tornado/simple_httpclient.py b/lib/tornado/simple_httpclient.py index cf58e16263..37b0bc27fd 100644 --- a/lib/tornado/simple_httpclient.py +++ b/lib/tornado/simple_httpclient.py @@ -1,8 +1,8 @@ #!/usr/bin/env python from __future__ import absolute_import, division, print_function, with_statement -from tornado.concurrent import is_future from tornado.escape import utf8, _unicode +from tornado import gen from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main, _RequestProxy from tornado import httputil from tornado.http1connection import HTTP1Connection, HTTP1ConnectionParameters @@ -391,7 +391,9 @@ def _write_body(self, start_read): self.connection.write(self.request.body) elif self.request.body_producer is not None: fut = self.request.body_producer(self.connection.write) - if is_future(fut): + if fut is not None: + fut = gen.convert_yielded(fut) + def on_body_written(fut): fut.result() self.connection.finish() @@ -427,7 +429,10 @@ def _handle_exception(self, typ, value, tb): if self.final_callback: self._remove_timeout() if isinstance(value, StreamClosedError): - value = HTTPError(599, "Stream closed") + if value.real_error is None: + value = HTTPError(599, "Stream closed") + else: + value = value.real_error self._run_callback(HTTPResponse(self.request, 599, error=value, request_time=self.io_loop.time() - self.start_time, )) @@ -459,9 +464,12 @@ def headers_received(self, first_line, headers): if self.request.expect_100_continue and first_line.code == 100: self._write_body(False) return - self.headers = headers self.code = first_line.code self.reason = first_line.reason + self.headers = headers + + if self._should_follow_redirect(): + return if self.request.header_callback is not None: # Reassemble the start line. @@ -470,14 +478,17 @@ def headers_received(self, first_line, headers): self.request.header_callback("%s: %s\r\n" % (k, v)) self.request.header_callback('\r\n') + def _should_follow_redirect(self): + return (self.request.follow_redirects and + self.request.max_redirects > 0 and + self.code in (301, 302, 303, 307)) + def finish(self): data = b''.join(self.chunks) self._remove_timeout() original_request = getattr(self.request, "original_request", self.request) - if (self.request.follow_redirects and - self.request.max_redirects > 0 and - self.code in (301, 302, 303, 307)): + if self._should_follow_redirect(): assert isinstance(self.request, _RequestProxy) new_request = copy.copy(self.request.request) new_request.url = urlparse.urljoin(self.request.url, @@ -524,6 +535,9 @@ def _on_end_request(self): self.stream.close() def data_received(self, chunk): + if self._should_follow_redirect(): + # We're going to follow a redirect so just discard the body. 
+ return if self.request.streaming_callback is not None: self.request.streaming_callback(chunk) else: diff --git a/lib/tornado/speedups.c b/lib/tornado/speedups.c deleted file mode 100644 index 174a6129e6..0000000000 --- a/lib/tornado/speedups.c +++ /dev/null @@ -1,52 +0,0 @@ -#define PY_SSIZE_T_CLEAN -#include - -static PyObject* websocket_mask(PyObject* self, PyObject* args) { - const char* mask; - Py_ssize_t mask_len; - const char* data; - Py_ssize_t data_len; - Py_ssize_t i; - PyObject* result; - char* buf; - - if (!PyArg_ParseTuple(args, "s#s#", &mask, &mask_len, &data, &data_len)) { - return NULL; - } - - result = PyBytes_FromStringAndSize(NULL, data_len); - if (!result) { - return NULL; - } - buf = PyBytes_AsString(result); - for (i = 0; i < data_len; i++) { - buf[i] = data[i] ^ mask[i % 4]; - } - - return result; -} - -static PyMethodDef methods[] = { - {"websocket_mask", websocket_mask, METH_VARARGS, ""}, - {NULL, NULL, 0, NULL} -}; - -#if PY_MAJOR_VERSION >= 3 -static struct PyModuleDef speedupsmodule = { - PyModuleDef_HEAD_INIT, - "speedups", - NULL, - -1, - methods -}; - -PyMODINIT_FUNC -PyInit_speedups() { - return PyModule_Create(&speedupsmodule); -} -#else // Python 2.x -PyMODINIT_FUNC -initspeedups() { - Py_InitModule("tornado.speedups", methods); -} -#endif diff --git a/lib/tornado/speedups.pyd b/lib/tornado/speedups.pyd new file mode 100644 index 0000000000000000000000000000000000000000..120974439e511b576ccfa9c88c8646eba68931ec GIT binary patch literal 6656 zcmeHLe{dXC9e;P13)iISrP77)BlOx_X5w_n#9Xuqm*5>q;6*nWngF3KY_4~k=4^6z z-tB3l*ePBk9B)o5cGNODfPg#j(eW6fTacq(u(Rj!a3^|%U*Y4;G2jt44q5`Mk`j_4{&rCa- zCv*K|=g{O7#wR;pot)A5O_OhF<9(ASG(0x>riMo~To?5BaNET)ZEGeZ@O3MBs@QR7 zPG^>^b`+YI5K;inQk^@y0$8lk+f5ueZ6?IZA80uwT^R6Uq3IIc$glbY+>LpF(wc*iuF7b@r}zkY z-3k^MBp-bl`n=}B@~G6+NQU#P(0MU<4d*ovA?eD9uBgos3{CHfc}gI`_EbjY-mqVH zr4@+#D|wIJO$#}fM*nLCXkvy^HcV5ZvrELImL^4K1t0c^sU~L?A3A7CIJy#Rm+e=D zZ)5Qcq~n*l(loJ`j!@R&>~bBT$&F5rDr|$0vJ8bn0cBtrIOjG;h-I&#NOJGF;6?R9eW7&JLbjNY(KJB zg>P~?Y5fUzlCO*NXP%V?sEWDjEJ!2NkZ_QyJX3eK|+gVqY;*f26SM%iAP z?6lEjPccnyEfJGl4)5*WJG^)Dur9TgO^9sDeat>kiwv06ia733`!~vXKKq;4nWXjY z>j;r4%PZ$QjIvJKbTM_0C|B>vYE#*ajp<6yQv0tvtr)^ zuO#cocAk0+BFc(hf=CrMz*b}v6cKlPNAq6vD@||{1g-3X$Yv?a(!^<7i+d)zp4NSp zC~v94wUE-fW}^I=vR1fR!N;@RXrV;?xhlf1vdg7MTb(602= zvsXau9*ysS!c)4!l%6h$|`u1Kx7|;G6F7d zWqY6H>MiWBDl|i)S}k}E@<96HX1z}A&3m0~A`3V>v1T1!$$O9YZY-^fYb&CyEpG?b zR8)aJ>Vzji#S2`=!Jaw@G3usR6YrSgF3PO=d@=)R z#iow4OcGmV;J9+R$o8>gcstpef2wMZZQqfC@tmj>lhH*9)fE3E_WdUya~fiUOQt!su_*f{4rz)J>}oq3!?*_CI<6BDLsvv$s-dTMzIJ|)%i zxABJ6@{Jn3OnVmN$^G-iRZJ5Nk6=vV`Ks^)RP}v6SN{^d{>(a!j~i_y#>8?6?ptJ;9WdTe-l0<^35c_+FWHST}n## z7Lrn>fRuJDA*IkSt-fmZ$_ZiI&}q9$un_@rA+Ur9kPGXUr#rAo;GLYhoRB}@|E)Ic zOOAiS_z-YA#=M#A`NST$jM!^{)rFDeMqF;szYbyoA+MqD&gmEAkpk!zROd&Q@VVVY zNKW@M*xZ1ghAu~KE|8lv8=Gd+Ij`OmpwkXQGOKwedduLwc^rg%Zys+eUrXTqr}qD{ zF5~95{&poA3~iRYF`XdyUF6*y4fp9>Zz#|n{IX2Q7c5d+zc;#BYV$>7az{MUD@$C` zL2kFS^-C?Gpwb!+#Czo$=~Fc%(y#P{L)B|51HHY3tTjn;$d8|3q)0dzQsgMP)hzY- zdhxHf&3On&p*UP#Z;}G?0*@z&SeC!&1oC zC&xfS9`Ygo$1PGkgk-mn0cc}N;gA-l!7ORqNwZ{dTHEhvTvu16O*@^}-Yd%ya*{L! zW4=wjaz{^8_65Y?rl>F4--zkQ$WJY|#pP&!n;h*9NBevszuXv(YZZFfe2c7f1pQky z26mYVFoDkXcL3Mo z{;3De@4OiLF8Jj49lygk&GAWt=6BTzgZ}V~_7U>vFn$7m40Ec|e;Rf?TJdP@qaUF; KUYg3H75F!0k`>$l literal 0 HcmV?d00001 diff --git a/lib/tornado/template.py b/lib/tornado/template.py index 3882ed02a8..fa588991e7 100644 --- a/lib/tornado/template.py +++ b/lib/tornado/template.py @@ -186,6 +186,11 @@ class (and specifically its ``render`` method) and will not work ``{% while *condition* %}... 
{% end %}`` Same as the python ``while`` statement. ``{% break %}`` and ``{% continue %}`` may be used inside the loop. + +``{% whitespace *mode* %}`` + Sets the whitespace mode for the remainder of the current file + (or until the next ``{% whitespace %}`` directive). See + `filter_whitespace` for available options. New in Tornado 4.3. """ from __future__ import absolute_import, division, print_function, with_statement @@ -210,6 +215,31 @@ class (and specifically its ``render`` method) and will not work _UNSET = object() +def filter_whitespace(mode, text): + """Transform whitespace in ``text`` according to ``mode``. + + Available modes are: + + * ``all``: Return all whitespace unmodified. + * ``single``: Collapse consecutive whitespace with a single whitespace + character, preserving newlines. + * ``oneline``: Collapse all runs of whitespace into a single space + character, removing all newlines in the process. + + .. versionadded:: 4.3 + """ + if mode == 'all': + return text + elif mode == 'single': + text = re.sub(r"([\t ]+)", " ", text) + text = re.sub(r"(\s*\n\s*)", "\n", text) + return text + elif mode == 'oneline': + return re.sub(r"(\s+)", " ", text) + else: + raise Exception("invalid whitespace mode %s" % mode) + + class Template(object): """A compiled template. @@ -220,21 +250,58 @@ class Template(object): # autodoc because _UNSET looks like garbage. When changing # this signature update website/sphinx/template.rst too. def __init__(self, template_string, name="", loader=None, - compress_whitespace=None, autoescape=_UNSET): - self.name = name - if compress_whitespace is None: - compress_whitespace = name.endswith(".html") or \ - name.endswith(".js") + compress_whitespace=_UNSET, autoescape=_UNSET, + whitespace=None): + """Construct a Template. + + :arg str template_string: the contents of the template file. + :arg str name: the filename from which the template was loaded + (used for error message). + :arg tornado.template.BaseLoader loader: the `~tornado.template.BaseLoader` responsible for this template, + used to resolve ``{% include %}`` and ``{% extend %}`` + directives. + :arg bool compress_whitespace: Deprecated since Tornado 4.3. + Equivalent to ``whitespace="single"`` if true and + ``whitespace="all"`` if false. + :arg str autoescape: The name of a function in the template + namespace, or ``None`` to disable escaping by default. + :arg str whitespace: A string specifying treatment of whitespace; + see `filter_whitespace` for options. + + .. versionchanged:: 4.3 + Added ``whitespace`` parameter; deprecated ``compress_whitespace``. + """ + self.name = escape.native_str(name) + + if compress_whitespace is not _UNSET: + # Convert deprecated compress_whitespace (bool) to whitespace (str). + if whitespace is not None: + raise Exception("cannot set both whitespace and compress_whitespace") + whitespace = "single" if compress_whitespace else "all" + if whitespace is None: + if loader and loader.whitespace: + whitespace = loader.whitespace + else: + # Whitespace defaults by filename. + if name.endswith(".html") or name.endswith(".js"): + whitespace = "single" + else: + whitespace = "all" + # Validate the whitespace setting. 
+ filter_whitespace(whitespace, '') + if autoescape is not _UNSET: self.autoescape = autoescape elif loader: self.autoescape = loader.autoescape else: self.autoescape = _DEFAULT_AUTOESCAPE + self.namespace = loader.namespace if loader else {} - reader = _TemplateReader(name, escape.native_str(template_string)) + reader = _TemplateReader(name, escape.native_str(template_string), + whitespace) self.file = _File(self, _parse(reader, self)) - self.code = self._generate_python(loader, compress_whitespace) + self.code = self._generate_python(loader) self.loader = loader try: # Under python2.5, the fake filename used here must match @@ -277,7 +344,7 @@ def generate(self, **kwargs): linecache.clearcache() return execute() - def _generate_python(self, loader, compress_whitespace): + def _generate_python(self, loader): buffer = StringIO() try: # named_blocks maps from names to _NamedBlock objects @@ -286,8 +353,8 @@ def _generate_python(self, loader, compress_whitespace): ancestors.reverse() for ancestor in ancestors: ancestor.find_named_blocks(loader, named_blocks) - writer = _CodeWriter(buffer, named_blocks, loader, ancestors[0].template, - compress_whitespace) + writer = _CodeWriter(buffer, named_blocks, loader, + ancestors[0].template) ancestors[0].generate(writer) return buffer.getvalue() finally: @@ -312,12 +379,26 @@ class BaseLoader(object): ``{% extends %}`` and ``{% include %}``. The loader caches all templates after they are loaded the first time. """ - def __init__(self, autoescape=_DEFAULT_AUTOESCAPE, namespace=None): - """``autoescape`` must be either None or a string naming a function - in the template namespace, such as "xhtml_escape". + def __init__(self, autoescape=_DEFAULT_AUTOESCAPE, namespace=None, + whitespace=None): + """Construct a template loader. + + :arg str autoescape: The name of a function in the template + namespace, such as "xhtml_escape", or ``None`` to disable + autoescaping by default. + :arg dict namespace: A dictionary to be added to the default template + namespace, or ``None``. + :arg str whitespace: A string specifying default behavior for + whitespace in templates; see `filter_whitespace` for options. + Default is "single" for files ending in ".html" and ".js" and + "all" for other files. + + .. versionchanged:: 4.3 + Added ``whitespace`` parameter. """ self.autoescape = autoescape self.namespace = namespace or {} + self.whitespace = whitespace self.templates = {} # self.lock protects self.templates. It's a reentrant lock # because templates may load other templates via `include` or @@ -558,37 +639,49 @@ def __init__(self, expression, line): class _Text(_Node): - def __init__(self, value, line): + def __init__(self, value, line, whitespace): self.value = value self.line = line + self.whitespace = whitespace def generate(self, writer): value = self.value - # Compress lots of white space to a single character. If the whitespace - # breaks a line, have it continue to break a line, but just with a - # single \n character - if writer.compress_whitespace and "
" not in value:
-            value = re.sub(r"([\t ]+)", " ", value)
-            value = re.sub(r"(\s*\n\s*)", "\n", value)
+        # Compress whitespace if requested, with a crude heuristic to avoid
+        # altering preformatted whitespace.
+        if "
" not in value:
+            value = filter_whitespace(self.whitespace, value)
 
         if value:
             writer.write_line('_tt_append(%r)' % escape.utf8(value), self.line)
 
 
 class ParseError(Exception):
-    """Raised for template syntax errors."""
-    pass
+    """Raised for template syntax errors.
+
+    ``ParseError`` instances have ``filename`` and ``lineno`` attributes
+    indicating the position of the error.
+
+    .. versionchanged:: 4.3
+       Added ``filename`` and ``lineno`` attributes.
+    """
+    def __init__(self, message, filename, lineno):
+        self.message = message
+        # The names "filename" and "lineno" are chosen for consistency
+        # with python SyntaxError.
+        self.filename = filename
+        self.lineno = lineno
+
+    def __str__(self):
+        return '%s at %s:%d' % (self.message, self.filename, self.lineno)
 
 
 class _CodeWriter(object):
-    def __init__(self, file, named_blocks, loader, current_template,
-                 compress_whitespace):
+    def __init__(self, file, named_blocks, loader, current_template):
         self.file = file
         self.named_blocks = named_blocks
         self.loader = loader
         self.current_template = current_template
-        self.compress_whitespace = compress_whitespace
         self.apply_counter = 0
         self.include_stack = []
         self._indent = 0
@@ -633,9 +726,10 @@ def write_line(self, line, line_number, indent=None):
 
 
 class _TemplateReader(object):
-    def __init__(self, name, text):
+    def __init__(self, name, text, whitespace):
         self.name = name
         self.text = text
+        self.whitespace = whitespace
         self.line = 1
         self.pos = 0
 
@@ -687,6 +781,9 @@ def __getitem__(self, key):
     def __str__(self):
         return self.text[self.pos:]
 
+    def raise_parse_error(self, msg):
+        raise ParseError(msg, self.name, self.line)
+
 
 def _format_code(code):
     lines = code.splitlines()
@@ -704,9 +801,10 @@ def _parse(reader, template, in_block=None, in_loop=None):
             if curly == -1 or curly + 1 == reader.remaining():
                 # EOF
                 if in_block:
-                    raise ParseError("Missing {%% end %%} block for %s" %
-                                     in_block)
-                body.chunks.append(_Text(reader.consume(), reader.line))
+                    reader.raise_parse_error(
+                        "Missing {%% end %%} block for %s" % in_block)
+                body.chunks.append(_Text(reader.consume(), reader.line,
+                                         reader.whitespace))
                 return body
             # If the first curly brace is not the start of a special token,
             # start searching from the character after it
@@ -725,7 +823,8 @@ def _parse(reader, template, in_block=None, in_loop=None):
         # Append any text before the special token
         if curly > 0:
             cons = reader.consume(curly)
-            body.chunks.append(_Text(cons, reader.line))
+            body.chunks.append(_Text(cons, reader.line,
+                                     reader.whitespace))
 
         start_brace = reader.consume(2)
         line = reader.line
@@ -736,14 +835,15 @@ def _parse(reader, template, in_block=None, in_loop=None):
         # which also use double braces.
         if reader.remaining() and reader[0] == "!":
             reader.consume(1)
-            body.chunks.append(_Text(start_brace, line))
+            body.chunks.append(_Text(start_brace, line,
+                                     reader.whitespace))
             continue
 
         # Comment
         if start_brace == "{#":
             end = reader.find("#}")
             if end == -1:
-                raise ParseError("Missing end expression #} on line %d" % line)
+                reader.raise_parse_error("Missing end comment #}")
             contents = reader.consume(end).strip()
             reader.consume(2)
             continue
@@ -752,11 +852,11 @@ def _parse(reader, template, in_block=None, in_loop=None):
         if start_brace == "{{":
             end = reader.find("}}")
             if end == -1:
-                raise ParseError("Missing end expression }} on line %d" % line)
+                reader.raise_parse_error("Missing end expression }}")
             contents = reader.consume(end).strip()
             reader.consume(2)
             if not contents:
-                raise ParseError("Empty expression on line %d" % line)
+                reader.raise_parse_error("Empty expression")
             body.chunks.append(_Expression(contents, line))
             continue
 
@@ -764,11 +864,11 @@ def _parse(reader, template, in_block=None, in_loop=None):
         assert start_brace == "{%", start_brace
         end = reader.find("%}")
         if end == -1:
-            raise ParseError("Missing end block %%} on line %d" % line)
+            reader.raise_parse_error("Missing end block %}")
         contents = reader.consume(end).strip()
         reader.consume(2)
         if not contents:
-            raise ParseError("Empty block tag ({%% %%}) on line %d" % line)
+            reader.raise_parse_error("Empty block tag ({% %})")
 
         operator, space, suffix = contents.partition(" ")
         suffix = suffix.strip()
@@ -783,40 +883,43 @@ def _parse(reader, template, in_block=None, in_loop=None):
         allowed_parents = intermediate_blocks.get(operator)
         if allowed_parents is not None:
             if not in_block:
-                raise ParseError("%s outside %s block" %
-                                 (operator, allowed_parents))
+                reader.raise_parse_error("%s outside %s block" %
+                                         (operator, allowed_parents))
             if in_block not in allowed_parents:
-                raise ParseError("%s block cannot be attached to %s block" % (operator, in_block))
+                reader.raise_parse_error(
+                    "%s block cannot be attached to %s block" %
+                    (operator, in_block))
             body.chunks.append(_IntermediateControlBlock(contents, line))
             continue
 
         # End tag
         elif operator == "end":
             if not in_block:
-                raise ParseError("Extra {%% end %%} block on line %d" % line)
+                reader.raise_parse_error("Extra {% end %} block")
             return body
 
         elif operator in ("extends", "include", "set", "import", "from",
-                          "comment", "autoescape", "raw", "module"):
+                          "comment", "autoescape", "whitespace", "raw",
+                          "module"):
             if operator == "comment":
                 continue
             if operator == "extends":
                 suffix = suffix.strip('"').strip("'")
                 if not suffix:
-                    raise ParseError("extends missing file path on line %d" % line)
+                    reader.raise_parse_error("extends missing file path")
                 block = _ExtendsBlock(suffix)
             elif operator in ("import", "from"):
                 if not suffix:
-                    raise ParseError("import missing statement on line %d" % line)
+                    reader.raise_parse_error("import missing statement")
                 block = _Statement(contents, line)
             elif operator == "include":
                 suffix = suffix.strip('"').strip("'")
                 if not suffix:
-                    raise ParseError("include missing file path on line %d" % line)
+                    reader.raise_parse_error("include missing file path")
                 block = _IncludeBlock(suffix, reader, line)
             elif operator == "set":
                 if not suffix:
-                    raise ParseError("set missing statement on line %d" % line)
+                    reader.raise_parse_error("set missing statement")
                 block = _Statement(suffix, line)
             elif operator == "autoescape":
                 fn = suffix.strip()
@@ -824,6 +927,12 @@ def _parse(reader, template, in_block=None, in_loop=None):
                     fn = None
                 template.autoescape = fn
                 continue
+            elif operator == "whitespace":
+                mode = suffix.strip()
+                # Validate the selected mode
+                filter_whitespace(mode, '')
+                reader.whitespace = mode
+                continue
             elif operator == "raw":
                 block = _Expression(suffix, line, raw=True)
             elif operator == "module":
@@ -844,11 +953,11 @@ def _parse(reader, template, in_block=None, in_loop=None):
 
             if operator == "apply":
                 if not suffix:
-                    raise ParseError("apply missing method name on line %d" % line)
+                    reader.raise_parse_error("apply missing method name")
                 block = _ApplyBlock(suffix, line, block_body)
             elif operator == "block":
                 if not suffix:
-                    raise ParseError("block missing name on line %d" % line)
+                    reader.raise_parse_error("block missing name")
                 block = _NamedBlock(suffix, block_body, template, line)
             else:
                 block = _ControlBlock(contents, line, block_body)
@@ -857,9 +966,10 @@ def _parse(reader, template, in_block=None, in_loop=None):
 
         elif operator in ("break", "continue"):
             if not in_loop:
-                raise ParseError("%s outside %s block" % (operator, set(["for", "while"])))
+                reader.raise_parse_error("%s outside %s block" %
+                                         (operator, set(["for", "while"])))
             body.chunks.append(_Statement(contents, line))
             continue
 
         else:
-            raise ParseError("unknown operator: %r" % operator)
+            reader.raise_parse_error("unknown operator: %r" % operator)
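
A minimal sketch of how the whitespace handling added above might be exercised,
assuming the vendored Tornado is at the 4.3 level this diff targets (the template
strings and variable names below are illustrative only):

    from tornado.template import Template, filter_whitespace

    # "single" collapses runs of spaces/tabs and squeezes blank lines.
    print(filter_whitespace("single", "a   b\n\n   c"))      # -> "a b\nc"

    # Per-template override of the default mode chosen by file extension.
    tmpl = Template("{% for x in items %}  {{ x }}\n{% end %}", whitespace="oneline")
    print(tmpl.generate(items=[1, 2]))

    # The same switch is available inside a template file:
    Template("{% whitespace all %}pre-formatted   text", name="notes.txt")
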
diff --git a/lib/tornado/test/asyncio_test.py b/lib/tornado/test/asyncio_test.py
index 1be0e54f35..b50b2048ee 100644
--- a/lib/tornado/test/asyncio_test.py
+++ b/lib/tornado/test/asyncio_test.py
@@ -12,20 +12,18 @@
 
 from __future__ import absolute_import, division, print_function, with_statement
 
-import sys
-import textwrap
-
 from tornado import gen
 from tornado.testing import AsyncTestCase, gen_test
-from tornado.test.util import unittest
+from tornado.test.util import unittest, skipBefore33, skipBefore35, exec_test
 
 try:
-    from tornado.platform.asyncio import asyncio, AsyncIOLoop
+    from tornado.platform.asyncio import asyncio
 except ImportError:
     asyncio = None
-
-skipIfNoSingleDispatch = unittest.skipIf(
-    gen.singledispatch is None, "singledispatch module not present")
+else:
+    from tornado.platform.asyncio import AsyncIOLoop, to_asyncio_future
+    # This is used in dynamically-evaluated code, so silence pyflakes.
+    to_asyncio_future
 
 
 @unittest.skipIf(asyncio is None, "asyncio module not present")
@@ -40,7 +38,6 @@ def test_asyncio_callback(self):
         asyncio.get_event_loop().call_soon(self.stop)
         self.wait()
 
-    @skipIfNoSingleDispatch
     @gen_test
     def test_asyncio_future(self):
         # Test that we can yield an asyncio future from a tornado coroutine.
@@ -49,21 +46,68 @@ def test_asyncio_future(self):
             asyncio.get_event_loop().run_in_executor(None, lambda: 42))
         self.assertEqual(x, 42)
 
-    @unittest.skipIf(sys.version_info < (3, 3),
-                     'PEP 380 not available')
-    @skipIfNoSingleDispatch
+    @skipBefore33
     @gen_test
     def test_asyncio_yield_from(self):
         # Test that we can use asyncio coroutines with 'yield from'
         # instead of asyncio.async(). This requires python 3.3 syntax.
-        global_namespace = dict(globals(), **locals())
-        local_namespace = {}
-        exec(textwrap.dedent("""
+        namespace = exec_test(globals(), locals(), """
         @gen.coroutine
         def f():
             event_loop = asyncio.get_event_loop()
             x = yield from event_loop.run_in_executor(None, lambda: 42)
             return x
-        """), global_namespace, local_namespace)
-        result = yield local_namespace['f']()
+        """)
+        result = yield namespace['f']()
         self.assertEqual(result, 42)
+
+    @skipBefore35
+    def test_asyncio_adapter(self):
+        # This test demonstrates that when using the asyncio coroutine
+        # runner (i.e. run_until_complete), the to_asyncio_future
+        # adapter is needed. No adapter is needed in the other direction,
+        # as demonstrated by other tests in the package.
+        @gen.coroutine
+        def tornado_coroutine():
+            yield gen.Task(self.io_loop.add_callback)
+            raise gen.Return(42)
+        native_coroutine_without_adapter = exec_test(globals(), locals(), """
+        async def native_coroutine_without_adapter():
+            return await tornado_coroutine()
+        """)["native_coroutine_without_adapter"]
+
+        native_coroutine_with_adapter = exec_test(globals(), locals(), """
+        async def native_coroutine_with_adapter():
+            return await to_asyncio_future(tornado_coroutine())
+        """)["native_coroutine_with_adapter"]
+
+        # Use the adapter, but two degrees from the tornado coroutine.
+        native_coroutine_with_adapter2 = exec_test(globals(), locals(), """
+        async def native_coroutine_with_adapter2():
+            return await to_asyncio_future(native_coroutine_without_adapter())
+        """)["native_coroutine_with_adapter2"]
+
+        # Tornado supports native coroutines both with and without adapters
+        self.assertEqual(
+            self.io_loop.run_sync(native_coroutine_without_adapter),
+            42)
+        self.assertEqual(
+            self.io_loop.run_sync(native_coroutine_with_adapter),
+            42)
+        self.assertEqual(
+            self.io_loop.run_sync(native_coroutine_with_adapter2),
+            42)
+
+        # Asyncio only supports coroutines that yield asyncio-compatible
+        # Futures.
+        with self.assertRaises(RuntimeError):
+            asyncio.get_event_loop().run_until_complete(
+                native_coroutine_without_adapter())
+        self.assertEqual(
+            asyncio.get_event_loop().run_until_complete(
+                native_coroutine_with_adapter()),
+            42)
+        self.assertEqual(
+            asyncio.get_event_loop().run_until_complete(
+                native_coroutine_with_adapter2()),
+            42)
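
The adapter requirement demonstrated by test_asyncio_adapter can be summarized in a
short sketch (assumes the asyncio bridge from this tree is importable; the coroutine
name is illustrative):

    import asyncio
    from tornado import gen
    from tornado.platform.asyncio import AsyncIOMainLoop, to_asyncio_future

    AsyncIOMainLoop().install()          # let Tornado and asyncio share one loop

    @gen.coroutine
    def answer():
        yield gen.moment
        raise gen.Return(42)

    loop = asyncio.get_event_loop()
    # asyncio's runner only accepts asyncio-compatible futures, so wrap ours:
    print(loop.run_until_complete(to_asyncio_future(answer())))   # 42
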
diff --git a/lib/tornado/test/auth_test.py b/lib/tornado/test/auth_test.py
index 541ecf16f3..59c96b232f 100644
--- a/lib/tornado/test/auth_test.py
+++ b/lib/tornado/test/auth_test.py
@@ -5,10 +5,11 @@
 
 
 from __future__ import absolute_import, division, print_function, with_statement
-from tornado.auth import OpenIdMixin, OAuthMixin, OAuth2Mixin, TwitterMixin, AuthError
+from tornado.auth import OpenIdMixin, OAuthMixin, OAuth2Mixin, TwitterMixin, AuthError, GoogleOAuth2Mixin, FacebookGraphMixin
 from tornado.concurrent import Future
 from tornado.escape import json_decode
 from tornado import gen
+from tornado.httputil import url_concat
 from tornado.log import gen_log
 from tornado.testing import AsyncHTTPTestCase, ExpectLog
 from tornado.util import u
@@ -125,6 +126,38 @@ def get(self):
         assert res.done()
 
 
+class FacebookClientLoginHandler(RequestHandler, FacebookGraphMixin):
+    def initialize(self, test):
+        self._OAUTH_AUTHORIZE_URL = test.get_url('/facebook/server/authorize')
+        self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/facebook/server/access_token')
+        self._FACEBOOK_BASE_URL = test.get_url('/facebook/server')
+
+    @gen.coroutine
+    def get(self):
+        if self.get_argument("code", None):
+            user = yield self.get_authenticated_user(
+                redirect_uri=self.request.full_url(),
+                client_id=self.settings["facebook_api_key"],
+                client_secret=self.settings["facebook_secret"],
+                code=self.get_argument("code"))
+            self.write(user)
+        else:
+            yield self.authorize_redirect(
+                redirect_uri=self.request.full_url(),
+                client_id=self.settings["facebook_api_key"],
+                extra_params={"scope": "read_stream,offline_access"})
+
+
+class FacebookServerAccessTokenHandler(RequestHandler):
+    def get(self):
+        self.write('access_token=asdf')
+
+
+class FacebookServerMeHandler(RequestHandler):
+    def get(self):
+        self.write('{}')
+
+
 class TwitterClientHandler(RequestHandler, TwitterMixin):
     def initialize(self, test):
         self._OAUTH_REQUEST_TOKEN_URL = test.get_url('/oauth1/server/request_token')
@@ -259,6 +292,8 @@ def get_app(self):
                  dict(version='1.0a')),
                 ('/oauth2/client/login', OAuth2ClientLoginHandler, dict(test=self)),
 
+                ('/facebook/client/login', FacebookClientLoginHandler, dict(test=self)),
+
                 ('/twitter/client/login', TwitterClientLoginHandler, dict(test=self)),
                 ('/twitter/client/login_gen_engine', TwitterClientLoginGenEngineHandler, dict(test=self)),
                 ('/twitter/client/login_gen_coroutine', TwitterClientLoginGenCoroutineHandler, dict(test=self)),
@@ -270,13 +305,17 @@ def get_app(self):
                 ('/oauth1/server/request_token', OAuth1ServerRequestTokenHandler),
                 ('/oauth1/server/access_token', OAuth1ServerAccessTokenHandler),
 
+                ('/facebook/server/access_token', FacebookServerAccessTokenHandler),
+                ('/facebook/server/me', FacebookServerMeHandler),
                 ('/twitter/server/access_token', TwitterServerAccessTokenHandler),
                 (r'/twitter/api/users/show/(.*)\.json', TwitterServerShowUserHandler),
                 (r'/twitter/api/account/verify_credentials\.json', TwitterServerVerifyCredentialsHandler),
             ],
             http_client=self.http_client,
             twitter_consumer_key='test_twitter_consumer_key',
-            twitter_consumer_secret='test_twitter_consumer_secret')
+            twitter_consumer_secret='test_twitter_consumer_secret',
+            facebook_api_key='test_facebook_api_key',
+            facebook_secret='test_facebook_secret')
 
     def test_openid_redirect(self):
         response = self.fetch('/openid/client/login', follow_redirects=False)
@@ -357,6 +396,13 @@ def test_oauth2_redirect(self):
         self.assertEqual(response.code, 302)
         self.assertTrue('/oauth2/server/authorize?' in response.headers['Location'])
 
+    def test_facebook_login(self):
+        response = self.fetch('/facebook/client/login', follow_redirects=False)
+        self.assertEqual(response.code, 302)
+        self.assertTrue('/facebook/server/authorize?' in response.headers['Location'])
+        response = self.fetch('/facebook/client/login?code=1234', follow_redirects=False)
+        self.assertEqual(response.code, 200)
+
     def base_twitter_redirect(self, url):
         # Same as test_oauth10a_redirect
         response = self.fetch(url, follow_redirects=False)
@@ -413,3 +459,87 @@ def test_twitter_show_user_future_error(self):
         response = self.fetch('/twitter/client/show_user_future?name=error')
         self.assertEqual(response.code, 500)
         self.assertIn(b'Error response HTTP 500', response.body)
+
+
+class GoogleLoginHandler(RequestHandler, GoogleOAuth2Mixin):
+    def initialize(self, test):
+        self.test = test
+        self._OAUTH_REDIRECT_URI = test.get_url('/client/login')
+        self._OAUTH_AUTHORIZE_URL = test.get_url('/google/oauth2/authorize')
+        self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/google/oauth2/token')
+
+    @gen.coroutine
+    def get(self):
+        code = self.get_argument('code', None)
+        if code is not None:
+            # retrieve the authenticated google user
+            access = yield self.get_authenticated_user(self._OAUTH_REDIRECT_URI,
+                                                       code)
+            user = yield self.oauth2_request(
+                self.test.get_url("/google/oauth2/userinfo"),
+                access_token=access["access_token"])
+            # return the user and access token as json
+            user["access_token"] = access["access_token"]
+            self.write(user)
+        else:
+            yield self.authorize_redirect(
+                redirect_uri=self._OAUTH_REDIRECT_URI,
+                client_id=self.settings['google_oauth']['key'],
+                client_secret=self.settings['google_oauth']['secret'],
+                scope=['profile', 'email'],
+                response_type='code',
+                extra_params={'prompt': 'select_account'})
+
+
+class GoogleOAuth2AuthorizeHandler(RequestHandler):
+    def get(self):
+        # issue a fake auth code and redirect to redirect_uri
+        code = 'fake-authorization-code'
+        self.redirect(url_concat(self.get_argument('redirect_uri'),
+                                 dict(code=code)))
+
+
+class GoogleOAuth2TokenHandler(RequestHandler):
+    def post(self):
+        assert self.get_argument('code') == 'fake-authorization-code'
+        # issue a fake token
+        self.finish({
+            'access_token': 'fake-access-token',
+            'expires_in': 'never-expires'
+        })
+
+
+class GoogleOAuth2UserinfoHandler(RequestHandler):
+    def get(self):
+        assert self.get_argument('access_token') == 'fake-access-token'
+        # return a fake user
+        self.finish({
+            'name': 'Foo',
+            'email': 'foo@example.com'
+        })
+
+
+class GoogleOAuth2Test(AsyncHTTPTestCase):
+    def get_app(self):
+        return Application(
+            [
+                # test endpoints
+                ('/client/login', GoogleLoginHandler, dict(test=self)),
+
+                # simulated google authorization server endpoints
+                ('/google/oauth2/authorize', GoogleOAuth2AuthorizeHandler),
+                ('/google/oauth2/token', GoogleOAuth2TokenHandler),
+                ('/google/oauth2/userinfo', GoogleOAuth2UserinfoHandler),
+            ],
+            google_oauth={
+                "key": 'fake_google_client_id',
+                "secret": 'fake_google_client_secret'
+            })
+
+    def test_google_login(self):
+        response = self.fetch('/client/login')
+        self.assertDictEqual({
+            u('name'): u('Foo'),
+            u('email'): u('foo@example.com'),
+            u('access_token'): u('fake-access-token'),
+        }, json_decode(response.body))
diff --git a/lib/tornado/test/curl_httpclient_test.py b/lib/tornado/test/curl_httpclient_test.py
index 3ac21f4d72..d06a7bd2ad 100644
--- a/lib/tornado/test/curl_httpclient_test.py
+++ b/lib/tornado/test/curl_httpclient_test.py
@@ -121,3 +121,4 @@ def test_custom_reason(self):
     def test_fail_custom_reason(self):
         response = self.fetch('/custom_fail_reason')
         self.assertEqual(str(response.error), "HTTP 400: Custom reason")
+
diff --git a/lib/tornado/test/gen_test.py b/lib/tornado/test/gen_test.py
index fdaa0ec804..1b118f9488 100644
--- a/lib/tornado/test/gen_test.py
+++ b/lib/tornado/test/gen_test.py
@@ -6,7 +6,6 @@
 import sys
 import textwrap
 import time
-import platform
 import weakref
 
 from tornado.concurrent import return_future, Future
@@ -16,7 +15,7 @@
 from tornado.log import app_log
 from tornado import stack_context
 from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, ExpectLog, gen_test
-from tornado.test.util import unittest, skipOnTravis
+from tornado.test.util import unittest, skipOnTravis, skipBefore33, skipBefore35, skipNotCPython, exec_test
 from tornado.web import Application, RequestHandler, asynchronous, HTTPError
 
 from tornado import gen
@@ -26,10 +25,6 @@
 except ImportError:
     futures = None
 
-skipBefore33 = unittest.skipIf(sys.version_info < (3, 3), 'PEP 380 not available')
-skipNotCPython = unittest.skipIf(platform.python_implementation() != 'CPython',
-                                 'Not CPython implementation')
-
 
 class GenEngineTest(AsyncTestCase):
     def setUp(self):
@@ -416,9 +411,9 @@ def test_multi_exceptions(self):
 
         # Exception logging may be explicitly quieted.
         with self.assertRaises(RuntimeError):
-                yield gen.Multi([self.async_exception(RuntimeError("error 1")),
-                                 self.async_exception(RuntimeError("error 2"))],
-                                quiet_exceptions=RuntimeError)
+            yield gen.Multi([self.async_exception(RuntimeError("error 1")),
+                             self.async_exception(RuntimeError("error 2"))],
+                            quiet_exceptions=RuntimeError)
 
     @gen_test
     def test_multi_future_exceptions(self):
@@ -435,10 +430,10 @@ def test_multi_future_exceptions(self):
 
         # Exception logging may be explicitly quieted.
         with self.assertRaises(RuntimeError):
-                yield gen.multi_future(
-                    [self.async_exception(RuntimeError("error 1")),
-                     self.async_exception(RuntimeError("error 2"))],
-                    quiet_exceptions=RuntimeError)
+            yield gen.multi_future(
+                [self.async_exception(RuntimeError("error 1")),
+                 self.async_exception(RuntimeError("error 2"))],
+                quiet_exceptions=RuntimeError)
 
     def test_arguments(self):
         @gen.engine
@@ -693,19 +688,13 @@ def f():
     @skipBefore33
     @gen_test
     def test_async_return(self):
-        # It is a compile-time error to return a value in a generator
-        # before Python 3.3, so we must test this with exec.
-        # Flatten the real global and local namespace into our fake globals:
-        # it's all global from the perspective of f().
-        global_namespace = dict(globals(), **locals())
-        local_namespace = {}
-        exec(textwrap.dedent("""
+        namespace = exec_test(globals(), locals(), """
         @gen.coroutine
         def f():
             yield gen.Task(self.io_loop.add_callback)
             return 42
-        """), global_namespace, local_namespace)
-        result = yield local_namespace['f']()
+        """)
+        result = yield namespace['f']()
         self.assertEqual(result, 42)
         self.finished = True
 
@@ -715,19 +704,69 @@ def test_async_early_return(self):
         # A yield statement exists but is not executed, which means
         # this function "returns" via an exception.  This exception
         # doesn't happen before the exception handling is set up.
-        global_namespace = dict(globals(), **locals())
-        local_namespace = {}
-        exec(textwrap.dedent("""
+        namespace = exec_test(globals(), locals(), """
         @gen.coroutine
         def f():
             if True:
                 return 42
             yield gen.Task(self.io_loop.add_callback)
-        """), global_namespace, local_namespace)
-        result = yield local_namespace['f']()
+        """)
+        result = yield namespace['f']()
+        self.assertEqual(result, 42)
+        self.finished = True
+
+    @skipBefore35
+    @gen_test
+    def test_async_await(self):
+        # This test verifies that an async function can await a
+        # yield-based gen.coroutine, and that a gen.coroutine
+        # (the test method itself) can yield an async function.
+        namespace = exec_test(globals(), locals(), """
+        async def f():
+            await gen.Task(self.io_loop.add_callback)
+            return 42
+        """)
+        result = yield namespace['f']()
         self.assertEqual(result, 42)
         self.finished = True
 
+    @skipBefore35
+    @gen_test
+    def test_async_await_mixed_multi_native_future(self):
+        namespace = exec_test(globals(), locals(), """
+        async def f1():
+            await gen.Task(self.io_loop.add_callback)
+            return 42
+        """)
+
+        @gen.coroutine
+        def f2():
+            yield gen.Task(self.io_loop.add_callback)
+            raise gen.Return(43)
+
+        results = yield [namespace['f1'](), f2()]
+        self.assertEqual(results, [42, 43])
+        self.finished = True
+
+    @skipBefore35
+    @gen_test
+    def test_async_await_mixed_multi_native_yieldpoint(self):
+        namespace = exec_test(globals(), locals(), """
+        async def f1():
+            await gen.Task(self.io_loop.add_callback)
+            return 42
+        """)
+
+        @gen.coroutine
+        def f2():
+            yield gen.Task(self.io_loop.add_callback)
+            raise gen.Return(43)
+
+        f2(callback=(yield gen.Callback('cb')))
+        results = yield [namespace['f1'](), gen.Wait('cb')]
+        self.assertEqual(results, [42, 43])
+        self.finished = True
+
     @gen_test
     def test_sync_return_no_value(self):
         @gen.coroutine
@@ -918,6 +957,7 @@ def inner(iteration):
 
         self.finished = True
 
+
 class GenSequenceHandler(RequestHandler):
     @asynchronous
     @gen.engine
@@ -1015,6 +1055,7 @@ def fail_task(callback):
             self.finish('ok')
 
 
+# "Undecorated" here refers to the absence of @asynchronous.
 class UndecoratedCoroutinesHandler(RequestHandler):
     @gen.coroutine
     def prepare(self):
@@ -1041,6 +1082,15 @@ def get(self):
         self.finish('ok')
 
 
+class NativeCoroutineHandler(RequestHandler):
+    if sys.version_info > (3, 5):
+        exec(textwrap.dedent("""
+        async def get(self):
+            await gen.Task(IOLoop.current().add_callback)
+            self.write("ok")
+        """))
+
+
 class GenWebTest(AsyncHTTPTestCase):
     def get_app(self):
         return Application([
@@ -1054,6 +1104,7 @@ def get_app(self):
             ('/yield_exception', GenYieldExceptionHandler),
             ('/undecorated_coroutine', UndecoratedCoroutinesHandler),
             ('/async_prepare_error', AsyncPrepareErrorHandler),
+            ('/native_coroutine', NativeCoroutineHandler),
         ])
 
     def test_sequence_handler(self):
@@ -1096,6 +1147,12 @@ def test_async_prepare_error_handler(self):
         response = self.fetch('/async_prepare_error')
         self.assertEqual(response.code, 403)
 
+    @skipBefore35
+    def test_native_coroutine_handler(self):
+        response = self.fetch('/native_coroutine')
+        self.assertEqual(response.code, 200)
+        self.assertEqual(response.body, b'ok')
+
 
 class WithTimeoutTest(AsyncTestCase):
     @gen_test
@@ -1249,6 +1306,45 @@ def test_iterator(self):
                     self.assertEqual(g.current_index, 3, 'wrong index')
             i += 1
 
+    @skipBefore35
+    @gen_test
+    def test_iterator_async_await(self):
+        # Recreate the previous test with py35 syntax. It's a little clunky
+        # because of the way the previous test handles an exception on
+        # a single iteration.
+        futures = [Future(), Future(), Future(), Future()]
+        self.finish_coroutines(0, futures)
+        self.finished = False
+
+        namespace = exec_test(globals(), locals(), """
+        async def f():
+            i = 0
+            g = gen.WaitIterator(*futures)
+            try:
+                async for r in g:
+                    if i == 0:
+                        self.assertEqual(r, 24, 'iterator value incorrect')
+                        self.assertEqual(g.current_index, 2, 'wrong index')
+                    else:
+                        raise Exception("expected exception on iteration 1")
+                    i += 1
+            except ZeroDivisionError:
+                i += 1
+            async for r in g:
+                if i == 2:
+                    self.assertEqual(r, 42, 'iterator value incorrect')
+                    self.assertEqual(g.current_index, 1, 'wrong index')
+                elif i == 3:
+                    self.assertEqual(r, 84, 'iterator value incorrect')
+                    self.assertEqual(g.current_index, 3, 'wrong index')
+                else:
+                    raise Exception("didn't expect iteration %d" % i)
+                i += 1
+            self.finished = True
+        """)
+        yield namespace['f']()
+        self.assertTrue(self.finished)
+
     @gen_test
     def test_no_ref(self):
         # In this usage, there is no direct hard reference to the
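
As a companion to the coroutine-interop tests above, a sketch of the behaviour they
exercise (Python 3.5+ and this tree's gen module assumed; function names are
illustrative):

    from tornado import gen
    from tornado.ioloop import IOLoop

    @gen.coroutine
    def old_style():
        yield gen.moment
        raise gen.Return(21)

    async def new_style():
        # Native coroutines can await yield-based coroutines directly...
        return (await old_style()) * 2

    # ...and Tornado's coroutine runner accepts native coroutines in turn.
    print(IOLoop.current().run_sync(new_style))   # 42
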
diff --git a/lib/tornado/test/httpclient_test.py b/lib/tornado/test/httpclient_test.py
index ecc63e4a49..e7551c93bb 100644
--- a/lib/tornado/test/httpclient_test.py
+++ b/lib/tornado/test/httpclient_test.py
@@ -5,6 +5,7 @@
 import base64
 import binascii
 from contextlib import closing
+import copy
 import functools
 import sys
 import threading
@@ -48,6 +49,7 @@ def put(self):
 
 class RedirectHandler(RequestHandler):
     def prepare(self):
+        self.write('redirects can have bodies too')
         self.redirect(self.get_argument("url"),
                       status=int(self.get_argument("status", "302")))
 
@@ -371,6 +373,32 @@ def test_header_types(self):
                     "response=%r, value=%r, container=%r" %
                     (resp.body, value, container))
 
+    def test_multi_line_headers(self):
+        # Multi-line http headers are rare but rfc-allowed
+        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2
+        sock, port = bind_unused_port()
+        with closing(sock):
+            def write_response(stream, request_data):
+                if b"HTTP/1." not in request_data:
+                    self.skipTest("requires HTTP/1.x")
+                stream.write(b"""\
+HTTP/1.1 200 OK
+X-XSS-Protection: 1;
+\tmode=block
+
+""".replace(b"\n", b"\r\n"), callback=stream.close)
+
+            def accept_callback(conn, address):
+                stream = IOStream(conn, io_loop=self.io_loop)
+                stream.read_until(b"\r\n\r\n",
+                                  functools.partial(write_response, stream))
+            netutil.add_accept_handler(sock, accept_callback, self.io_loop)
+            self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop)
+            resp = self.wait()
+            resp.rethrow()
+            self.assertEqual(resp.headers['X-XSS-Protection'], "1; mode=block")
+            self.io_loop.remove_handler(sock.fileno())
+
     def test_304_with_content_length(self):
         # According to the spec 304 responses SHOULD NOT include
         # Content-Length or other entity headers, but some servers do it
@@ -444,18 +472,32 @@ def test_all_methods(self):
                               allow_nonstandard_methods=True)
         self.assertEqual(response.body, b'OTHER')
 
-    @gen_test
     def test_body_sanity_checks(self):
-        hello_url = self.get_url('/hello')
-        with self.assertRaises(ValueError) as context:
-            yield self.http_client.fetch(hello_url, body='data')
-
-        self.assertTrue('must be None' in str(context.exception))
-
-        with self.assertRaises(ValueError) as context:
-            yield self.http_client.fetch(hello_url, method='POST')
-
-        self.assertTrue('must not be None' in str(context.exception))
+        # These methods require a body.
+        for method in ('POST', 'PUT', 'PATCH'):
+            with self.assertRaises(ValueError) as context:
+                resp = self.fetch('/all_methods', method=method)
+                resp.rethrow()
+            self.assertIn('must not be None', str(context.exception))
+
+            resp = self.fetch('/all_methods', method=method,
+                              allow_nonstandard_methods=True)
+            self.assertEqual(resp.code, 200)
+
+        # These methods don't allow a body.
+        for method in ('GET', 'DELETE', 'OPTIONS'):
+            with self.assertRaises(ValueError) as context:
+                resp = self.fetch('/all_methods', method=method, body=b'asdf')
+                resp.rethrow()
+            self.assertIn('must be None', str(context.exception))
+
+            # In most cases this can be overridden, but curl_httpclient
+            # does not allow body with a GET at all.
+            if method != 'GET':
+                resp = self.fetch('/all_methods', method=method, body=b'asdf',
+                                  allow_nonstandard_methods=True)
+                resp.rethrow()
+                self.assertEqual(resp.code, 200)
 
     # This test causes odd failures with the combination of
     # curl_httpclient (at least with the version of libcurl available
@@ -605,3 +647,15 @@ def test_if_modified_since(self):
         request = HTTPRequest('http://example.com', if_modified_since=http_date)
         self.assertEqual(request.headers,
                          {'If-Modified-Since': format_timestamp(http_date)})
+
+
+class HTTPErrorTestCase(unittest.TestCase):
+    def test_copy(self):
+        e = HTTPError(403)
+        e2 = copy.copy(e)
+        self.assertIsNot(e, e2)
+        self.assertEqual(e.code, e2.code)
+
+    def test_str(self):
+        e = HTTPError(403)
+        self.assertEqual(str(e), "HTTP 403: Forbidden")
diff --git a/lib/tornado/test/httpserver_test.py b/lib/tornado/test/httpserver_test.py
index f05599dd12..065f5b1fad 100644
--- a/lib/tornado/test/httpserver_test.py
+++ b/lib/tornado/test/httpserver_test.py
@@ -117,6 +117,16 @@ def test_non_ssl_request(self):
                 response = self.wait()
         self.assertEqual(response.code, 599)
 
+    def test_error_logging(self):
+        # No stack traces are logged for SSL errors.
+        with ExpectLog(gen_log, 'SSL Error') as expect_log:
+            self.http_client.fetch(
+                self.get_url("/").replace("https:", "http:"),
+                self.stop)
+            response = self.wait()
+            self.assertEqual(response.code, 599)
+        self.assertFalse(expect_log.logged_stack)
+
 # Python's SSL implementation differs significantly between versions.
 # For example, SSLv3 and TLSv1 throw an exception if you try to read
 # from the socket before the handshake is complete, but the default
@@ -167,12 +177,12 @@ def test_missing_key(self):
         self.assertRaises((ValueError, IOError),
                           HTTPServer, application, ssl_options={
                               "certfile": "/__mising__.crt",
-                          })
+        })
         self.assertRaises((ValueError, IOError),
                           HTTPServer, application, ssl_options={
                               "certfile": existing_certificate,
                               "keyfile": "/__missing__.key"
-                          })
+        })
 
         # This actually works because both files exist
         HTTPServer(application, ssl_options={
@@ -889,9 +899,12 @@ def test_small_headers(self):
         self.assertEqual(response.body, b"Hello world")
 
     def test_large_headers(self):
-        with ExpectLog(gen_log, "Unsatisfiable read"):
+        with ExpectLog(gen_log, "Unsatisfiable read", required=False):
             response = self.fetch("/", headers={'X-Filler': 'a' * 1000})
-        self.assertEqual(response.code, 599)
+        # 431 is "Request Header Fields Too Large", defined in RFC
+        # 6585. However, many implementations just close the
+        # connection in this case, resulting in a 599.
+        self.assertIn(response.code, (431, 599))
 
 
 @skipOnTravis
diff --git a/lib/tornado/test/httputil_test.py b/lib/tornado/test/httputil_test.py
index 6e95360174..3f25f3eeda 100644
--- a/lib/tornado/test/httputil_test.py
+++ b/lib/tornado/test/httputil_test.py
@@ -12,11 +12,11 @@
 import copy
 import datetime
 import logging
+import pickle
 import time
 
 
 class TestUrlConcat(unittest.TestCase):
-
     def test_url_concat_no_query_params(self):
         url = url_concat(
             "https://localhost/path",
@@ -298,6 +298,26 @@ def test_copy(self):
             self.assertIsNot(headers, h1)
             self.assertIsNot(headers.get_list('A'), h1.get_list('A'))
 
+    def test_pickle_roundtrip(self):
+        headers = HTTPHeaders()
+        headers.add('Set-Cookie', 'a=b')
+        headers.add('Set-Cookie', 'c=d')
+        headers.add('Content-Type', 'text/html')
+        pickled = pickle.dumps(headers)
+        unpickled = pickle.loads(pickled)
+        self.assertEqual(sorted(headers.get_all()), sorted(unpickled.get_all()))
+        self.assertEqual(sorted(headers.items()), sorted(unpickled.items()))
+
+    def test_setdefault(self):
+        headers = HTTPHeaders()
+        headers['foo'] = 'bar'
+        # If a value is present, setdefault returns it without changes.
+        self.assertEqual(headers.setdefault('foo', 'baz'), 'bar')
+        self.assertEqual(headers['foo'], 'bar')
+        # If a value is not present, setdefault sets it for future use.
+        self.assertEqual(headers.setdefault('quux', 'xyzzy'), 'xyzzy')
+        self.assertEqual(headers['quux'], 'xyzzy')
+        self.assertEqual(sorted(headers.get_all()), [('Foo', 'bar'), ('Quux', 'xyzzy')])
 
 
 class FormatTimestampTest(unittest.TestCase):
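
A short sketch of the two HTTPHeaders behaviours covered by the new tests above
(header names and values are arbitrary):

    import pickle
    from tornado.httputil import HTTPHeaders

    h = HTTPHeaders()
    h.add("Set-Cookie", "a=b")
    h.add("Set-Cookie", "c=d")                        # repeated headers are kept
    assert h.setdefault("Content-Type", "text/html") == "text/html"
    assert h.setdefault("Content-Type", "text/plain") == "text/html"   # unchanged

    restored = pickle.loads(pickle.dumps(h))          # now round-trips cleanly
    assert sorted(restored.get_all()) == sorted(h.get_all())
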
diff --git a/lib/tornado/test/ioloop_test.py b/lib/tornado/test/ioloop_test.py
index f3a0cbdcfe..71b4ef873e 100644
--- a/lib/tornado/test/ioloop_test.py
+++ b/lib/tornado/test/ioloop_test.py
@@ -16,7 +16,7 @@
 from tornado.platform.select import _Select
 from tornado.stack_context import ExceptionStackContext, StackContext, wrap, NullContext
 from tornado.testing import AsyncTestCase, bind_unused_port, ExpectLog
-from tornado.test.util import unittest, skipIfNonUnix, skipOnTravis
+from tornado.test.util import unittest, skipIfNonUnix, skipOnTravis, skipBefore35, exec_test
 
 try:
     from concurrent import futures
@@ -363,6 +363,18 @@ def callback():
             with ExpectLog(app_log, "Exception in callback"):
                 self.wait()
 
+    @skipBefore35
+    def test_exception_logging_native_coro(self):
+        """The IOLoop examines exceptions from awaitables and logs them."""
+        namespace = exec_test(globals(), locals(), """
+        async def callback():
+            self.io_loop.add_callback(self.stop)
+            1 / 0
+        """)
+        with NullContext():
+            self.io_loop.add_callback(namespace["callback"])
+            with ExpectLog(app_log, "Exception in callback"):
+                self.wait()
     def test_spawn_callback(self):
         # An added callback runs in the test's stack_context, so will be
        # re-raised in wait().
@@ -409,18 +421,46 @@ def handle_read(fd, events):
 # automatically set as current.
 class TestIOLoopCurrent(unittest.TestCase):
     def setUp(self):
-        self.io_loop = IOLoop()
+        self.io_loop = None
+        IOLoop.clear_current()
 
     def tearDown(self):
-        self.io_loop.close()
+        if self.io_loop is not None:
+            self.io_loop.close()
 
-    def test_current(self):
-        def f():
-            self.current_io_loop = IOLoop.current()
-            self.io_loop.stop()
-        self.io_loop.add_callback(f)
-        self.io_loop.start()
-        self.assertIs(self.current_io_loop, self.io_loop)
+    def test_default_current(self):
+        self.io_loop = IOLoop()
+        # The first IOLoop with default arguments is made current.
+        self.assertIs(self.io_loop, IOLoop.current())
+        # A second IOLoop can be created but is not made current.
+        io_loop2 = IOLoop()
+        self.assertIs(self.io_loop, IOLoop.current())
+        io_loop2.close()
+
+    def test_non_current(self):
+        self.io_loop = IOLoop(make_current=False)
+        # The new IOLoop is not initially made current.
+        self.assertIsNone(IOLoop.current(instance=False))
+        # Starting the IOLoop makes it current, and stopping the loop
+        # makes it non-current. This process is repeatable.
+        for i in range(3):
+            def f():
+                self.current_io_loop = IOLoop.current()
+                self.io_loop.stop()
+            self.io_loop.add_callback(f)
+            self.io_loop.start()
+            self.assertIs(self.current_io_loop, self.io_loop)
+            # Now that the loop is stopped, it is no longer current.
+            self.assertIsNone(IOLoop.current(instance=False))
+
+    def test_force_current(self):
+        self.io_loop = IOLoop(make_current=True)
+        self.assertIs(self.io_loop, IOLoop.current())
+        with self.assertRaises(RuntimeError):
+            # A second make_current=True construction cannot succeed.
+            IOLoop(make_current=True)
+        # current() was not affected by the failed construction.
+        self.assertIs(self.io_loop, IOLoop.current())
 
 
 class TestIOLoopAddCallback(AsyncTestCase):
@@ -530,7 +570,8 @@ def tearDown(self):
         self.io_loop.close()
 
     def test_sync_result(self):
-        self.assertEqual(self.io_loop.run_sync(lambda: 42), 42)
+        with self.assertRaises(gen.BadYieldError):
+            self.io_loop.run_sync(lambda: 42)
 
     def test_sync_exception(self):
         with self.assertRaises(ZeroDivisionError):
@@ -562,6 +603,14 @@ def f():
             yield gen.Task(self.io_loop.add_timeout, self.io_loop.time() + 1)
         self.assertRaises(TimeoutError, self.io_loop.run_sync, f, timeout=0.01)
 
+    @skipBefore35
+    def test_native_coroutine(self):
+        namespace = exec_test(globals(), locals(), """
+        async def f():
+            await gen.Task(self.io_loop.add_callback)
+        """)
+        self.io_loop.run_sync(namespace['f'])
+
 
 class TestPeriodicCallback(unittest.TestCase):
     def setUp(self):
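
The construction rules exercised by TestIOLoopCurrent, sketched outside the test
harness (assumes a fresh process in which no IOLoop has been made current yet):

    from tornado.ioloop import IOLoop

    loop = IOLoop(make_current=False)          # built, but not installed
    assert IOLoop.current(instance=False) is None

    loop.make_current()                        # install it explicitly
    assert IOLoop.current() is loop

    try:
        IOLoop(make_current=True)              # refuses to displace the current loop
    except RuntimeError:
        pass
    loop.close()
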
diff --git a/lib/tornado/test/iostream_test.py b/lib/tornado/test/iostream_test.py
index 45df6b50a7..060f7a454f 100644
--- a/lib/tornado/test/iostream_test.py
+++ b/lib/tornado/test/iostream_test.py
@@ -19,6 +19,14 @@
 import ssl
 import sys
 
+try:
+    from unittest import mock  # python 3.3
+except ImportError:
+    try:
+        import mock  # third-party mock package
+    except ImportError:
+        mock = None
+
 
 def _server_ssl_options():
     return dict(
@@ -239,19 +247,22 @@ def connect_callback():
             # cygwin's errnos don't match those used on native windows python
             self.assertTrue(stream.error.args[0] in _ERRNO_CONNREFUSED)
 
+    @unittest.skipIf(mock is None, 'mock package not present')
     def test_gaierror(self):
-        # Test that IOStream sets its exc_info on getaddrinfo error
+        # Test that IOStream sets its exc_info on getaddrinfo error.
+        # It's difficult to reliably trigger a getaddrinfo error;
+        # some resolvers won't even return errors for malformed names,
+        # so we mock it instead. If IOStream changes to call a Resolver
+        # before sock.connect, the mock target will need to change too.
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
         stream = IOStream(s, io_loop=self.io_loop)
         stream.set_close_callback(self.stop)
-        # To reliably generate a gaierror we use a malformed domain name
-        # instead of a name that's simply unlikely to exist (since
-        # opendns and some ISPs return bogus addresses for nonexistent
-        # domains instead of the proper error codes).
-        with ExpectLog(gen_log, "Connect error"):
-            stream.connect(('an invalid domain', 54321), callback=self.stop)
-            self.wait()
-            self.assertTrue(isinstance(stream.error, socket.gaierror), stream.error)
+        with mock.patch('socket.socket.connect',
+                        side_effect=socket.gaierror('boom')):
+            with ExpectLog(gen_log, "Connect error"):
+                stream.connect(('localhost', 80), callback=self.stop)
+                self.wait()
+                self.assertIsInstance(stream.error, socket.gaierror)
 
     def test_read_callback_error(self):
         # Test that IOStream sets its exc_info when a read callback throws
@@ -446,6 +457,18 @@ def test_read_until_close_after_close(self):
             server.close()
             client.close()
 
+    @unittest.skipIf(mock is None, 'mock package not present')
+    def test_read_until_close_with_error(self):
+        server, client = self.make_iostream_pair()
+        try:
+            with mock.patch('tornado.iostream.BaseIOStream._try_inline_read',
+                            side_effect=IOError('boom')):
+                with self.assertRaisesRegexp(IOError, 'boom'):
+                    client.read_until_close(self.stop)
+        finally:
+            server.close()
+            client.close()
+
     def test_streaming_read_until_close_after_close(self):
         # Same as the preceding test but with a streaming_callback.
         # All data should go through the streaming callback,
@@ -928,8 +951,10 @@ def test_check_hostname(self):
             server_hostname=b'127.0.0.1')
         with ExpectLog(gen_log, "SSL Error"):
             with self.assertRaises(ssl.SSLError):
+                # The client fails to connect with an SSL error.
                 yield client_future
-        with self.assertRaises((ssl.SSLError, socket.error)):
+        with self.assertRaises(Exception):
+            # The server fails to connect, but the exact error is unspecified.
             yield server_future
 
 
diff --git a/lib/tornado/test/locale_test.py b/lib/tornado/test/locale_test.py
index 31c57a6194..e25783861e 100644
--- a/lib/tornado/test/locale_test.py
+++ b/lib/tornado/test/locale_test.py
@@ -2,9 +2,12 @@
 
 import datetime
 import os
+import shutil
+import tempfile
+
 import tornado.locale
-from tornado.escape import utf8
-from tornado.test.util import unittest
+from tornado.escape import utf8, to_unicode
+from tornado.test.util import unittest, skipOnAppEngine
 from tornado.util import u, unicode_type
 
 
@@ -34,6 +37,28 @@ def test_csv(self):
         self.assertTrue(isinstance(locale, tornado.locale.CSVLocale))
         self.assertEqual(locale.translate("school"), u("\u00e9cole"))
 
+    # tempfile.mkdtemp is not available on app engine.
+    @skipOnAppEngine
+    def test_csv_bom(self):
+        with open(os.path.join(os.path.dirname(__file__), 'csv_translations',
+                               'fr_FR.csv'), 'rb') as f:
+            char_data = to_unicode(f.read())
+        # Re-encode our input data (which is utf-8 without BOM) in
+        # encodings that use the BOM and ensure that we can still load
+        # it. Note that utf-16-le and utf-16-be do not write a BOM,
+        # so we only test whichever variant is native to our platform.
+        for encoding in ['utf-8-sig', 'utf-16']:
+            tmpdir = tempfile.mkdtemp()
+            try:
+                with open(os.path.join(tmpdir, 'fr_FR.csv'), 'wb') as f:
+                    f.write(char_data.encode(encoding))
+                tornado.locale.load_translations(tmpdir)
+                locale = tornado.locale.get('fr_FR')
+                self.assertIsInstance(locale, tornado.locale.CSVLocale)
+                self.assertEqual(locale.translate("school"), u("\u00e9cole"))
+            finally:
+                shutil.rmtree(tmpdir)
+
     def test_gettext(self):
         tornado.locale.load_gettext_translations(
             os.path.join(os.path.dirname(__file__), 'gettext_translations'),
diff --git a/lib/tornado/test/locks_test.py b/lib/tornado/test/locks_test.py
index 90bdafaa60..020ec105e0 100644
--- a/lib/tornado/test/locks_test.py
+++ b/lib/tornado/test/locks_test.py
@@ -10,12 +10,14 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+
+from __future__ import absolute_import, division, print_function, with_statement
 from datetime import timedelta
 
 from tornado import gen, locks
 from tornado.gen import TimeoutError
 from tornado.testing import gen_test, AsyncTestCase
-from tornado.test.util import unittest
+from tornado.test.util import unittest, skipBefore35, exec_test
 
 
 class ConditionTest(AsyncTestCase):
@@ -328,6 +330,22 @@ def test_context_manager(self):
         # Semaphore was released and can be acquired again.
         self.assertTrue(sem.acquire().done())
 
+    @skipBefore35
+    @gen_test
+    def test_context_manager_async_await(self):
+        # Repeat the above test using 'async with'.
+        sem = locks.Semaphore()
+
+        namespace = exec_test(globals(), locals(), """
+        async def f():
+            async with sem as yielded:
+                self.assertTrue(yielded is None)
+        """)
+        yield namespace['f']()
+
+        # Semaphore was released and can be acquired again.
+        self.assertTrue(sem.acquire().done())
+
     @gen_test
     def test_context_manager_exception(self):
         sem = locks.Semaphore()
@@ -443,6 +461,26 @@ def f(idx):
         yield futures
         self.assertEqual(list(range(N)), history)
 
+    @skipBefore35
+    @gen_test
+    def test_acquire_fifo_async_with(self):
+        # Repeat the above test using `async with lock:`
+        # instead of `with (yield lock.acquire()):`.
+        lock = locks.Lock()
+        self.assertTrue(lock.acquire().done())
+        N = 5
+        history = []
+
+        namespace = exec_test(globals(), locals(), """
+        async def f(idx):
+            async with lock:
+                history.append(idx)
+        """)
+        futures = [namespace['f'](i) for i in range(N)]
+        lock.release()
+        yield futures
+        self.assertEqual(list(range(N)), history)
+
     @gen_test
     def test_acquire_timeout(self):
         lock = locks.Lock()
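
A short sketch of the ``async with`` support covered by the new lock and semaphore tests, assuming Python 3.5+ and reusing the same test harness this file already uses (the class name is illustrative)::

    from tornado import locks
    from tornado.testing import AsyncTestCase, gen_test

    class LockAsyncWithExample(AsyncTestCase):
        @gen_test
        async def test_async_with_lock(self):
            lock = locks.Lock()
            async with lock:
                # The lock is held here and released automatically on exit.
                pass
            # Uncontended, so a fresh acquire() is immediately ready.
            self.assertTrue(lock.acquire().done())
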
diff --git a/lib/tornado/test/log_test.py b/lib/tornado/test/log_test.py
index 102d3d42f9..df493bcdc4 100644
--- a/lib/tornado/test/log_test.py
+++ b/lib/tornado/test/log_test.py
@@ -160,6 +160,39 @@ def test_log_file(self):
                 os.unlink(filename)
             os.rmdir(tmpdir)
 
+    def test_log_file_with_timed_rotating(self):
+        tmpdir = tempfile.mkdtemp()
+        try:
+            self.options.log_file_prefix = tmpdir + '/test_log'
+            self.options.log_rotate_mode = 'time'
+            enable_pretty_logging(options=self.options, logger=self.logger)
+            self.logger.error('hello')
+            self.logger.handlers[0].flush()
+            filenames = glob.glob(tmpdir + '/test_log*')
+            self.assertEqual(1, len(filenames))
+            with open(filenames[0]) as f:
+                self.assertRegexpMatches(
+                    f.read(),
+                    r'^\[E [^]]*\] hello$')
+        finally:
+            for handler in self.logger.handlers:
+                handler.flush()
+                handler.close()
+            for filename in glob.glob(tmpdir + '/test_log*'):
+                os.unlink(filename)
+            os.rmdir(tmpdir)
+
+    def test_wrong_rotate_mode_value(self):
+        try:
+            self.options.log_file_prefix = 'some_path'
+            self.options.log_rotate_mode = 'wrong_mode'
+            self.assertRaises(ValueError, enable_pretty_logging,
+                              options=self.options, logger=self.logger)
+        finally:
+            for handler in self.logger.handlers:
+                handler.flush()
+                handler.close()
+
 
 class LoggingOptionTest(unittest.TestCase):
     """Test the ability to enable and disable Tornado's logging hooks."""
diff --git a/lib/tornado/test/netutil_test.py b/lib/tornado/test/netutil_test.py
index 7d9cad34a0..9ef5f7cfe1 100644
--- a/lib/tornado/test/netutil_test.py
+++ b/lib/tornado/test/netutil_test.py
@@ -9,7 +9,7 @@
 
 from tornado.netutil import BlockingResolver, ThreadedResolver, is_valid_ip, bind_sockets
 from tornado.stack_context import ExceptionStackContext
-from tornado.testing import AsyncTestCase, gen_test
+from tornado.testing import AsyncTestCase, gen_test, bind_unused_port
 from tornado.test.util import unittest, skipIfNoNetwork
 
 try:
@@ -200,3 +200,14 @@ def test_same_port_allocation(self):
         finally:
             for sock in sockets:
                 sock.close()
+
+    @unittest.skipIf(not hasattr(socket, "SO_REUSEPORT"), "SO_REUSEPORT is not supported")
+    def test_reuse_port(self):
+        socket, port = bind_unused_port(reuse_port=True)
+        try:
+            sockets = bind_sockets(port, 'localhost', reuse_port=True)
+            self.assertTrue(all(s.getsockname()[1] == port for s in sockets))
+        finally:
+            socket.close()
+            for sock in sockets:
+                sock.close()
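
A minimal sketch of the new ``reuse_port`` flag exercised above; it only applies on platforms that define SO_REUSEPORT::

    import socket
    from tornado.netutil import bind_sockets
    from tornado.testing import bind_unused_port

    if hasattr(socket, 'SO_REUSEPORT'):
        sock, port = bind_unused_port(reuse_port=True)
        # A second set of listeners can bind the same port.
        sockets = bind_sockets(port, 'localhost', reuse_port=True)
        for s in sockets + [sock]:
            s.close()
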
diff --git a/lib/tornado/test/options_test.cfg b/lib/tornado/test/options_test.cfg
index 09a6306006..cbac892471 100644
--- a/lib/tornado/test/options_test.cfg
+++ b/lib/tornado/test/options_test.cfg
@@ -1,3 +1,5 @@
 port=443
 port=443
-username='李康'
\ No newline at end of file
+username='李康'
+
+foo_bar='a'
diff --git a/lib/tornado/test/options_test.py b/lib/tornado/test/options_test.py
index f90c30d251..c32184bb40 100644
--- a/lib/tornado/test/options_test.py
+++ b/lib/tornado/test/options_test.py
@@ -221,3 +221,45 @@ def test_error_redefine(self):
             options.define('foo')
         self.assertRegexpMatches(str(cm.exception),
                                  'Option.*foo.*already defined')
+
+    def test_dash_underscore_cli(self):
+        # Dashes and underscores should be interchangeable.
+        for defined_name in ['foo-bar', 'foo_bar']:
+            for flag in ['--foo-bar=a', '--foo_bar=a']:
+                options = OptionParser()
+                options.define(defined_name)
+                options.parse_command_line(['main.py', flag])
+                # Attr-style access always uses underscores.
+                self.assertEqual(options.foo_bar, 'a')
+                # Dict-style access allows both.
+                self.assertEqual(options['foo-bar'], 'a')
+                self.assertEqual(options['foo_bar'], 'a')
+
+    def test_dash_underscore_file(self):
+        # No matter how an option was defined, it can be set with underscores
+        # in a config file.
+        for defined_name in ['foo-bar', 'foo_bar']:
+            options = OptionParser()
+            options.define(defined_name)
+            options.parse_config_file(os.path.join(os.path.dirname(__file__),
+                                                   "options_test.cfg"))
+            self.assertEqual(options.foo_bar, 'a')
+
+    def test_dash_underscore_introspection(self):
+        # Original names are preserved in introspection APIs.
+        options = OptionParser()
+        options.define('with-dash', group='g')
+        options.define('with_underscore', group='g')
+        all_options = ['help', 'with-dash', 'with_underscore']
+        self.assertEqual(sorted(options), all_options)
+        self.assertEqual(sorted(k for (k, v) in options.items()), all_options)
+        self.assertEqual(sorted(options.as_dict().keys()), all_options)
+
+        self.assertEqual(sorted(options.group_dict('g')),
+                         ['with-dash', 'with_underscore'])
+
+        # --help shows CLI-style names with dashes.
+        buf = StringIO()
+        options.print_help(buf)
+        self.assertIn('--with-dash', buf.getvalue())
+        self.assertIn('--with-underscore', buf.getvalue())
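
A minimal sketch of the dash/underscore equivalence described by these tests::

    from tornado.options import OptionParser

    options = OptionParser()
    options.define('foo-bar')                               # defined with a dash
    options.parse_command_line(['main.py', '--foo_bar=a'])  # set with an underscore
    print(options.foo_bar)     # attribute access always uses underscores: 'a'
    print(options['foo-bar'])  # dict-style access accepts either spelling: 'a'
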
diff --git a/lib/tornado/test/queues_test.py b/lib/tornado/test/queues_test.py
index f2ffb646f0..e72b6ed5f8 100644
--- a/lib/tornado/test/queues_test.py
+++ b/lib/tornado/test/queues_test.py
@@ -10,13 +10,15 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+
+from __future__ import absolute_import, division, print_function, with_statement
 from datetime import timedelta
 from random import random
 
 from tornado import gen, queues
 from tornado.gen import TimeoutError
 from tornado.testing import gen_test, AsyncTestCase
-from tornado.test.util import unittest
+from tornado.test.util import unittest, skipBefore35, exec_test
 
 
 class QueueBasicTest(AsyncTestCase):
@@ -112,7 +114,7 @@ def test_get_timeout(self):
         get = q.get()
         with self.assertRaises(TimeoutError):
             yield get_timeout
-        
+
         q.put_nowait(0)
         self.assertEqual(0, (yield get))
 
@@ -154,6 +156,24 @@ def test_get_clears_timed_out_getters(self):
         for getter in getters:
             self.assertRaises(TimeoutError, getter.result)
 
+    @skipBefore35
+    @gen_test
+    def test_async_for(self):
+        q = queues.Queue()
+        for i in range(5):
+            q.put(i)
+
+        namespace = exec_test(globals(), locals(), """
+        async def f():
+            results = []
+            async for i in q:
+                results.append(i)
+                if i == 4:
+                    return results
+        """)
+        results = yield namespace['f']()
+        self.assertEqual(results, list(range(5)))
+
 
 class QueuePutTest(AsyncTestCase):
     @gen_test
@@ -176,7 +196,7 @@ def test_put_with_getters(self):
         self.assertEqual(0, (yield get0))
         yield q.put(1)
         self.assertEqual(1, (yield get1))
-        
+
     @gen_test
     def test_nonblocking_put_with_getters(self):
         q = queues.Queue()
@@ -208,7 +228,7 @@ def test_put_timeout(self):
         put = q.put(2)
         with self.assertRaises(TimeoutError):
             yield put_timeout
-        
+
         self.assertEqual(0, q.get_nowait())
         # 1 was never put in the queue.
         self.assertEqual(2, (yield q.get()))
@@ -281,7 +301,7 @@ def test_float_maxsize(self):
 
 class QueueJoinTest(AsyncTestCase):
     queue_class = queues.Queue
-    
+
     def test_task_done_underflow(self):
         q = self.queue_class()
         self.assertRaises(ValueError, q.task_done)
@@ -338,7 +358,7 @@ def test_join_timeout(self):
 
 class PriorityQueueJoinTest(QueueJoinTest):
     queue_class = queues.PriorityQueue
-    
+
     @gen_test
     def test_order(self):
         q = self.queue_class(maxsize=2)
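
A short sketch of the ``async for`` iteration the new queue test exercises, assuming Python 3.5+ (the class name is illustrative)::

    from tornado import queues
    from tornado.testing import AsyncTestCase, gen_test

    class QueueAsyncForExample(AsyncTestCase):
        @gen_test
        async def test_async_for(self):
            q = queues.Queue()
            for i in range(3):
                q.put(i)
            seen = []
            async for item in q:  # waits for each item as it becomes available
                seen.append(item)
                if item == 2:
                    break         # iteration never ends on its own
            self.assertEqual(seen, [0, 1, 2])
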
diff --git a/lib/tornado/test/simple_httpclient_test.py b/lib/tornado/test/simple_httpclient_test.py
index c0de22b7cf..da11abbe37 100644
--- a/lib/tornado/test/simple_httpclient_test.py
+++ b/lib/tornado/test/simple_httpclient_test.py
@@ -11,17 +11,18 @@
 import ssl
 import sys
 
+from tornado.escape import to_unicode
 from tornado import gen
 from tornado.httpclient import AsyncHTTPClient
 from tornado.httputil import HTTPHeaders, ResponseStartLine
 from tornado.ioloop import IOLoop
 from tornado.log import gen_log
 from tornado.netutil import Resolver, bind_sockets
-from tornado.simple_httpclient import SimpleAsyncHTTPClient, _default_ca_certs
-from tornado.test.httpclient_test import ChunkHandler, CountdownHandler, HelloWorldHandler
+from tornado.simple_httpclient import SimpleAsyncHTTPClient
+from tornado.test.httpclient_test import ChunkHandler, CountdownHandler, HelloWorldHandler, RedirectHandler
 from tornado.test import httpclient_test
 from tornado.testing import AsyncHTTPTestCase, AsyncHTTPSTestCase, AsyncTestCase, ExpectLog
-from tornado.test.util import skipOnTravis, skipIfNoIPv6, refusing_port, unittest
+from tornado.test.util import skipOnTravis, skipIfNoIPv6, refusing_port, unittest, skipBefore35, exec_test
 from tornado.web import RequestHandler, Application, asynchronous, url, stream_request_body
 
 
@@ -106,7 +107,7 @@ def get(self):
             # level so we have to go around it.
             stream = self.request.connection.detach()
             stream.write(b"HTTP/1.0 200 OK\r\n\r\n"
-                               b"hello")
+                         b"hello")
             stream.close()
         else:
             self.finish('HTTP/1 required')
@@ -145,6 +146,7 @@ def get_app(self):
             url("/no_content_length", NoContentLengthHandler),
             url("/echo_post", EchoPostHandler),
             url("/respond_in_prepare", RespondInPrepareHandler),
+            url("/redirect", RedirectHandler),
         ], gzip=True)
 
     def test_singleton(self):
@@ -205,6 +207,7 @@ def test_gzip(self):
         self.assertEqual(response.headers["Content-Encoding"], "gzip")
         self.assertNotEqual(response.body, b"asdfqwer")
         # Our test data gets bigger when gzipped.  Oops.  :)
+        # Chunked encoding bypasses the MIN_LENGTH check.
         self.assertEqual(len(response.body), 34)
         f = gzip.GzipFile(mode="r", fileobj=response.buffer)
         self.assertEqual(f.read(), b"asdfqwer")
@@ -401,6 +404,33 @@ def test_async_body_producer_content_length(self):
         response.rethrow()
         self.assertEqual(response.body, b"12345678")
 
+    @skipBefore35
+    def test_native_body_producer_chunked(self):
+        namespace = exec_test(globals(), locals(), """
+        async def body_producer(write):
+            await write(b'1234')
+            await gen.Task(IOLoop.current().add_callback)
+            await write(b'5678')
+        """)
+        response = self.fetch("/echo_post", method="POST",
+                              body_producer=namespace["body_producer"])
+        response.rethrow()
+        self.assertEqual(response.body, b"12345678")
+
+    @skipBefore35
+    def test_native_body_producer_content_length(self):
+        namespace = exec_test(globals(), locals(), """
+        async def body_producer(write):
+            await write(b'1234')
+            await gen.Task(IOLoop.current().add_callback)
+            await write(b'5678')
+        """)
+        response = self.fetch("/echo_post", method="POST",
+                              body_producer=namespace["body_producer"],
+                              headers={'Content-Length': '8'})
+        response.rethrow()
+        self.assertEqual(response.body, b"12345678")
+
     def test_100_continue(self):
         response = self.fetch("/echo_post", method="POST",
                               body=b"1234",
@@ -415,6 +445,24 @@ def body_producer(write):
                               expect_100_continue=True)
         self.assertEqual(response.code, 403)
 
+    def test_streaming_follow_redirects(self):
+        # When following redirects, header and streaming callbacks
+        # should only be called for the final result.
+        # TODO(bdarnell): this test belongs in httpclient_test instead of
+        # simple_httpclient_test, but it fails with the version of libcurl
+        # available on travis-ci. Move it when that has been upgraded
+        # or we have a better framework to skip tests based on curl version.
+        headers = []
+        chunks = []
+        self.fetch("/redirect?url=/hello",
+                   header_callback=headers.append,
+                   streaming_callback=chunks.append)
+        chunks = list(map(to_unicode, chunks))
+        self.assertEqual(chunks, ['Hello world!'])
+        # Make sure we only got one set of headers.
+        num_start_lines = len([h for h in headers if h.startswith("HTTP/")])
+        self.assertEqual(num_start_lines, 1)
+
 
 class SimpleHTTPClientTestCase(SimpleHTTPClientTestMixin, AsyncHTTPTestCase):
     def setUp(self):
@@ -463,6 +511,16 @@ def test_ssl_context_handshake_fail(self):
             resp = self.fetch("/hello", ssl_options=ctx)
         self.assertRaises(ssl.SSLError, resp.rethrow)
 
+    def test_error_logging(self):
+        # No stack traces are logged for SSL errors (in this case,
+        # failure to validate the testing self-signed cert).
+        # The SSLError itself is exposed as response.error.
+        with ExpectLog(gen_log, '.*') as expect_log:
+            response = self.fetch("/", validate_cert=True)
+            self.assertEqual(response.code, 599)
+            self.assertIsInstance(response.error, ssl.SSLError)
+        self.assertFalse(expect_log.logged_stack)
+
 
 class CreateAsyncHTTPClientTestCase(AsyncTestCase):
     def setUp(self):
@@ -637,22 +695,22 @@ class MaxBodySizeTest(AsyncHTTPTestCase):
     def get_app(self):
         class SmallBody(RequestHandler):
             def get(self):
-                self.write("a"*1024*64)
+                self.write("a" * 1024 * 64)
 
         class LargeBody(RequestHandler):
             def get(self):
-                self.write("a"*1024*100)
+                self.write("a" * 1024 * 100)
 
         return Application([('/small', SmallBody),
                             ('/large', LargeBody)])
 
     def get_http_client(self):
-        return SimpleAsyncHTTPClient(io_loop=self.io_loop, max_body_size=1024*64)
+        return SimpleAsyncHTTPClient(io_loop=self.io_loop, max_body_size=1024 * 64)
 
     def test_small_body(self):
         response = self.fetch('/small')
         response.rethrow()
-        self.assertEqual(response.body, b'a'*1024*64)
+        self.assertEqual(response.body, b'a' * 1024 * 64)
 
     def test_large_body(self):
         with ExpectLog(gen_log, "Malformed HTTP message from None: Content-Length too long"):
@@ -665,15 +723,37 @@ def get_app(self):
 
         class LargeBody(RequestHandler):
             def get(self):
-                self.write("a"*1024*100)
+                self.write("a" * 1024 * 100)
 
         return Application([('/large', LargeBody)])
 
     def get_http_client(self):
         # 100KB body with 64KB buffer
-        return SimpleAsyncHTTPClient(io_loop=self.io_loop, max_body_size=1024*100, max_buffer_size=1024*64)
+        return SimpleAsyncHTTPClient(io_loop=self.io_loop, max_body_size=1024 * 100, max_buffer_size=1024 * 64)
 
     def test_large_body(self):
         response = self.fetch('/large')
         response.rethrow()
-        self.assertEqual(response.body, b'a'*1024*100)
+        self.assertEqual(response.body, b'a' * 1024 * 100)
+
+
+class ChunkedWithContentLengthTest(AsyncHTTPTestCase):
+    def get_app(self):
+
+        class ChunkedWithContentLength(RequestHandler):
+            def get(self):
+                # Add an invalid Transfer-Encoding to the response
+                self.set_header('Transfer-Encoding', 'chunked')
+                self.write("Hello world")
+
+        return Application([('/chunkwithcl', ChunkedWithContentLength)])
+
+    def get_http_client(self):
+        return SimpleAsyncHTTPClient()
+
+    def test_chunked_with_content_length(self):
+        # Make sure the invalid headers are detected
+        with ExpectLog(gen_log, ("Malformed HTTP message from None: Response "
+                       "with both Transfer-Encoding and Content-Length")):
+            response = self.fetch('/chunkwithcl')
+        self.assertEqual(response.code, 599)
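
A hedged sketch of the native-coroutine ``body_producer`` form added above; the URL is hypothetical and the coroutine is assumed to run on a started IOLoop (Python 3.5+)::

    from tornado import gen
    from tornado.httpclient import AsyncHTTPClient
    from tornado.ioloop import IOLoop

    async def body_producer(write):
        # Each chunk is sent once the previous write's future resolves.
        await write(b'1234')
        await gen.Task(IOLoop.current().add_callback)
        await write(b'5678')

    async def upload():
        client = AsyncHTTPClient()
        # Hypothetical endpoint; chunked transfer encoding is used because
        # no Content-Length header is supplied.
        response = await client.fetch('http://localhost:8888/echo_post',
                                      method='POST',
                                      body_producer=body_producer)
        return response.body
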
diff --git a/lib/tornado/test/static/sample.xml b/lib/tornado/test/static/sample.xml
new file mode 100644
index 0000000000..35ea0e29da
--- /dev/null
+++ b/lib/tornado/test/static/sample.xml
@@ -0,0 +1,23 @@
+
+
+    
+        1
+        2008
+        141100
+        
+        
+    
+    
+        4
+        2011
+        59900
+        
+    
+    
+        68
+        2011
+        13600
+        
+        
+    
+
diff --git a/lib/tornado/test/static/sample.xml.bz2 b/lib/tornado/test/static/sample.xml.bz2
new file mode 100644
index 0000000000000000000000000000000000000000..44dc6633324307e6834e0346eefe7874f1061b3c
GIT binary patch
literal 285
zcmV+&0pk8bT4*^jL0KkKS>2RqRsaBWUw}jqPyzpEss~trKJVY~FaaAOiqz214H^I%
z0000Osgq4W22C{aKmY&%6sD6y5NWB9VFOJD)Ow86kS3)_1!n}|LR|HTBj$IEaP;31
z;m?&8&W5YRNP|^vgqR)$6TyPAz={Y#J+F>qV@u_3X_IFvAew0?Adx~wsED2oK~j1d
z-KD{_6*6^@hl~aM+AfjHIv)IM9(sq^pDoGwIjGX>W=iEN5}~e;HI7*(MABi>RJAzL
zl(zbRufDJ^oW2kvO_M=BGjH?&w$~(5cVe7_6)<4!L249=C)Z<+{6ys
jF{A^f3W@a~AsVwqH~Day#^dKlrq0
OKi?O$L>^X}0ssI%#(xO_

literal 0
HcmV?d00001

diff --git a/lib/tornado/test/tcpserver_test.py b/lib/tornado/test/tcpserver_test.py
index 84c950769e..c01c04ddfb 100644
--- a/lib/tornado/test/tcpserver_test.py
+++ b/lib/tornado/test/tcpserver_test.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import, division, print_function, with_statement
 import socket
 
 from tornado import gen
@@ -18,7 +19,7 @@ class TestServer(TCPServer):
             def handle_stream(self, stream, address):
                 yield gen.moment
                 stream.close()
-                1/0
+                1 / 0
 
         server = client = None
         try:
diff --git a/lib/tornado/test/template_test.py b/lib/tornado/test/template_test.py
index ac1fbbd049..031b2f540a 100644
--- a/lib/tornado/test/template_test.py
+++ b/lib/tornado/test/template_test.py
@@ -173,6 +173,10 @@ def test_no_inherit_future(self):
         template = Template('{{ 1 / 2 }}')
         self.assertEqual(template.generate(), '0')
 
+    def test_non_ascii_name(self):
+        loader = DictLoader({u("t\u00e9st.html"): "hello"})
+        self.assertEqual(loader.load(u("t\u00e9st.html")).generate(), b"hello")
+
 
 class StackTraceTest(unittest.TestCase):
     def test_error_line_number_expression(self):
@@ -264,6 +268,19 @@ def test_multi_includes(self):
                             traceback.format_exc())
 
 
+class ParseErrorDetailTest(unittest.TestCase):
+    def test_details(self):
+        loader = DictLoader({
+            "foo.html": "\n\n{{",
+        })
+        with self.assertRaises(ParseError) as cm:
+            loader.load("foo.html")
+        self.assertEqual("Missing end expression }} at foo.html:3",
+                         str(cm.exception))
+        self.assertEqual("foo.html", cm.exception.filename)
+        self.assertEqual(3, cm.exception.lineno)
+
+
 class AutoEscapeTest(unittest.TestCase):
     def setUp(self):
         self.templates = {
@@ -387,7 +404,7 @@ def render(template, name):
         self.assertEqual(render("foo.py", ["not a string"]),
                          b"""s = "['not a string']"\n""")
 
-    def test_minimize_whitespace(self):
+    def test_manual_minimize_whitespace(self):
         # Whitespace including newlines is allowed within template tags
         # and directives, and this is one way to avoid long lines while
         # keeping extra whitespace out of the rendered output.
@@ -401,6 +418,62 @@ def test_minimize_whitespace(self):
         self.assertEqual(loader.load("foo.txt").generate(items=range(5)),
                          b"0, 1, 2, 3, 4")
 
+    def test_whitespace_by_filename(self):
+        # Default whitespace handling depends on the template filename.
+        loader = DictLoader({
+            "foo.html": "   \n\t\n asdf\t   ",
+            "bar.js": " \n\n\n\t qwer     ",
+            "baz.txt": "\t    zxcv\n\n",
+            "include.html": "  {% include baz.txt %} \n ",
+            "include.txt": "\t\t{% include foo.html %}    ",
+        })
+
+        # HTML and JS files have whitespace compressed by default.
+        self.assertEqual(loader.load("foo.html").generate(),
+                         b"\nasdf ")
+        self.assertEqual(loader.load("bar.js").generate(),
+                         b"\nqwer ")
+        # TXT files do not.
+        self.assertEqual(loader.load("baz.txt").generate(),
+                         b"\t    zxcv\n\n")
+
+        # Each file maintains its own status even when included in
+        # a file of the other type.
+        self.assertEqual(loader.load("include.html").generate(),
+                         b" \t    zxcv\n\n\n")
+        self.assertEqual(loader.load("include.txt").generate(),
+                         b"\t\t\nasdf     ")
+
+    def test_whitespace_by_loader(self):
+        templates = {
+            "foo.html": "\t\tfoo\n\n",
+            "bar.txt": "\t\tbar\n\n",
+        }
+        loader = DictLoader(templates, whitespace='all')
+        self.assertEqual(loader.load("foo.html").generate(), b"\t\tfoo\n\n")
+        self.assertEqual(loader.load("bar.txt").generate(), b"\t\tbar\n\n")
+
+        loader = DictLoader(templates, whitespace='single')
+        self.assertEqual(loader.load("foo.html").generate(), b" foo\n")
+        self.assertEqual(loader.load("bar.txt").generate(), b" bar\n")
+
+        loader = DictLoader(templates, whitespace='oneline')
+        self.assertEqual(loader.load("foo.html").generate(), b" foo ")
+        self.assertEqual(loader.load("bar.txt").generate(), b" bar ")
+
+    def test_whitespace_directive(self):
+        loader = DictLoader({
+            "foo.html": """\
+{% whitespace oneline %}
+    {% for i in range(3) %}
+        {{ i }}
+    {% end %}
+{% whitespace all %}
+    pre\tformatted
+"""})
+        self.assertEqual(loader.load("foo.html").generate(),
+                         b"  0  1  2  \n    pre\tformatted\n")
+
 
 class TemplateLoaderTest(unittest.TestCase):
     def setUp(self):
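
A minimal sketch of the whitespace modes these tests describe; the template content is illustrative::

    from tornado.template import DictLoader

    templates = {'index.html': '\t\thello\n\n'}

    # 'all' keeps whitespace untouched, 'single' collapses each run to one
    # space (or newline), and 'oneline' collapses everything to single spaces.
    loader = DictLoader(templates, whitespace='oneline')
    print(loader.load('index.html').generate())  # b' hello '

The same default can also be set application-wide through the new ``template_whitespace`` application setting picked up by ``create_template_loader`` in the web.py hunk further below.
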
diff --git a/lib/tornado/test/testing_test.py b/lib/tornado/test/testing_test.py
index bcea4d252f..e00058ac34 100644
--- a/lib/tornado/test/testing_test.py
+++ b/lib/tornado/test/testing_test.py
@@ -5,11 +5,11 @@
 from tornado import gen, ioloop
 from tornado.log import app_log
 from tornado.testing import AsyncTestCase, gen_test, ExpectLog
-from tornado.test.util import unittest
-
+from tornado.test.util import unittest, skipBefore35, exec_test
 import contextlib
 import os
 import traceback
+import warnings
 
 
 @contextlib.contextmanager
@@ -58,7 +58,7 @@ def test_subsequent_wait_calls(self):
         This test makes sure that a second call to wait()
         clears the first timeout.
         """
-        self.io_loop.add_timeout(self.io_loop.time() + 0.01, self.stop)
+        self.io_loop.add_timeout(self.io_loop.time() + 0.00, self.stop)
         self.wait(timeout=0.02)
         self.io_loop.add_timeout(self.io_loop.time() + 0.03, self.stop)
         self.wait(timeout=0.15)
@@ -86,6 +86,26 @@ def test_gen(self):
         self.assertEqual(len(result.errors), 1)
         self.assertIn("should be decorated", result.errors[0][1])
 
+    @skipBefore35
+    def test_undecorated_coroutine(self):
+        namespace = exec_test(globals(), locals(), """
+        class Test(AsyncTestCase):
+            async def test_coro(self):
+                pass
+        """)
+
+        test_class = namespace['Test']
+        test = test_class('test_coro')
+        result = unittest.TestResult()
+
+        # Silence "RuntimeWarning: coroutine 'test_coro' was never awaited".
+        with warnings.catch_warnings():
+            warnings.simplefilter('ignore')
+            test.run(result)
+
+        self.assertEqual(len(result.errors), 1)
+        self.assertIn("should be decorated", result.errors[0][1])
+
     def test_undecorated_generator_with_skip(self):
         class Test(AsyncTestCase):
             @unittest.skip("don't run this")
@@ -228,5 +248,31 @@ def test_with_kwargs(self, **kwargs):
         test_with_kwargs(self, test='test')
         self.finished = True
 
+    @skipBefore35
+    def test_native_coroutine(self):
+        namespace = exec_test(globals(), locals(), """
+        @gen_test
+        async def test(self):
+            self.finished = True
+        """)
+
+        namespace['test'](self)
+
+    @skipBefore35
+    def test_native_coroutine_timeout(self):
+        # Set a short timeout and exceed it.
+        namespace = exec_test(globals(), locals(), """
+        @gen_test(timeout=0.1)
+        async def test(self):
+            await gen.sleep(1)
+        """)
+
+        try:
+            namespace['test'](self)
+            self.fail("did not get expected exception")
+        except ioloop.TimeoutError:
+            self.finished = True
+
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/lib/tornado/test/twisted_test.py b/lib/tornado/test/twisted_test.py
index 22410567a8..1cff30ecbb 100644
--- a/lib/tornado/test/twisted_test.py
+++ b/lib/tornado/test/twisted_test.py
@@ -72,8 +72,8 @@
 skipIfNoTwisted = unittest.skipUnless(have_twisted,
                                       "twisted module not present")
 
-skipIfNoSingleDispatch = unittest.skipIf(
-    gen.singledispatch is None, "singledispatch module not present")
+skipIfPy26 = unittest.skipIf(sys.version_info < (2, 7),
+                             "twisted incompatible with singledispatch in py26")
 
 
 def save_signal_handlers():
@@ -495,7 +495,7 @@ def testTornadoServerTwistedClientReactor(self):
             'http://127.0.0.1:%d' % self.tornado_port, self.run_reactor)
         self.assertEqual(response, 'Hello from tornado!')
 
-    @skipIfNoSingleDispatch
+    @skipIfPy26
     def testTornadoServerTwistedCoroutineClientIOLoop(self):
         self.start_tornado_server()
         response = self.twisted_coroutine_fetch(
@@ -504,7 +504,7 @@ def testTornadoServerTwistedCoroutineClientIOLoop(self):
 
 
 @skipIfNoTwisted
-@skipIfNoSingleDispatch
+@skipIfPy26
 class ConvertDeferredTest(unittest.TestCase):
     def test_success(self):
         @inlineCallbacks
@@ -552,6 +552,10 @@ def fn():
             # with py27+, but not unittest2 on py26.
             'test_changeGID',
             'test_changeUID',
+            # This test sometimes fails with EPIPE on a call to
+            # kqueue.control. Happens consistently for me with
+            # trollius but not asyncio or other IOLoops.
+            'test_childConnectionLost',
         ],
         # Process tests appear to work on OSX 10.7, but not 10.6
         # 'twisted.internet.test.test_process.PTYProcessTestsBuilder': [
@@ -630,6 +634,24 @@ def tearDown(self):
                     os.chdir(self.__curdir)
                     shutil.rmtree(self.__tempdir)
 
+                def flushWarnings(self, *args, **kwargs):
+                    # This is a hack because Twisted and Tornado have
+                    # differing approaches to warnings in tests.
+                    # Tornado sets up a global set of warnings filters
+                    # in runtests.py, while Twisted patches the filter
+                    # list in each test. The net effect is that
+                    # Twisted's tests run with Tornado's increased
+                    # strictness (BytesWarning and ResourceWarning are
+                    # enabled) but without our filter rules to ignore those
+                    # warnings from Twisted code.
+                    filtered = []
+                    for w in super(TornadoTest, self).flushWarnings(
+                            *args, **kwargs):
+                        if w['category'] in (BytesWarning, ResourceWarning):
+                            continue
+                        filtered.append(w)
+                    return filtered
+
                 def buildReactor(self):
                     self.__saved_signals = save_signal_handlers()
                     return test_class.buildReactor(self)
@@ -658,6 +680,14 @@ def unbuildReactor(self, reactor):
     # log.startLoggingWithObserver(log.PythonLoggingObserver().emit, setStdout=0)
     # import logging; logging.getLogger('twisted').setLevel(logging.WARNING)
 
+    # Twisted recently introduced a new logger; disable that one too.
+    try:
+        from twisted.logger import globalLogBeginner
+    except ImportError:
+        pass
+    else:
+        globalLogBeginner.beginLoggingTo([])
+
 if have_twisted:
     class LayeredTwistedIOLoop(TwistedIOLoop):
         """Layers a TwistedIOLoop on top of a TornadoReactor on a SelectIOLoop.
@@ -671,7 +701,7 @@ def initialize(self, **kwargs):
             # When configured to use LayeredTwistedIOLoop we can't easily
             # get the next-best IOLoop implementation, so use the lowest common
             # denominator.
-            self.real_io_loop = SelectIOLoop()
+            self.real_io_loop = SelectIOLoop(make_current=False)
             reactor = TornadoReactor(io_loop=self.real_io_loop)
             super(LayeredTwistedIOLoop, self).initialize(reactor=reactor, **kwargs)
             self.add_callback(self.make_current)
@@ -691,7 +721,12 @@ def stop(self):
             # tornado-on-twisted-on-tornado.  I'm clearly missing something
             # about the startup/crash semantics, but since stop and crash
             # are really only used in tests it doesn't really matter.
-            self.reactor.callWhenRunning(self.reactor.crash)
+            def f():
+                self.reactor.crash()
+                # Become current again on restart. This is needed to
+                # override real_io_loop's claim to being the current loop.
+                self.add_callback(self.make_current)
+            self.reactor.callWhenRunning(f)
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/lib/tornado/test/util.py b/lib/tornado/test/util.py
index 9dd9c0ce12..cfa7c81d21 100644
--- a/lib/tornado/test/util.py
+++ b/lib/tornado/test/util.py
@@ -1,8 +1,10 @@
 from __future__ import absolute_import, division, print_function, with_statement
 
 import os
+import platform
 import socket
 import sys
+import textwrap
 
 from tornado.testing import bind_unused_port
 
@@ -24,6 +26,9 @@
 skipOnTravis = unittest.skipIf('TRAVIS' in os.environ,
                                'timing tests unreliable on travis')
 
+skipOnAppEngine = unittest.skipIf('APPENGINE_RUNTIME' in os.environ,
+                                  'not available on Google App Engine')
+
 # Set the environment variable NO_NETWORK=1 to disable any tests that
 # depend on an external network.
 skipIfNoNetwork = unittest.skipIf('NO_NETWORK' in os.environ,
@@ -32,6 +37,12 @@
 skipIfNoIPv6 = unittest.skipIf(not socket.has_ipv6, 'ipv6 support not present')
 
 
+skipBefore33 = unittest.skipIf(sys.version_info < (3, 3), 'PEP 380 (yield from) not available')
+skipBefore35 = unittest.skipIf(sys.version_info < (3, 5), 'PEP 492 (async/await) not available')
+skipNotCPython = unittest.skipIf(platform.python_implementation() != 'CPython',
+                                 'Not CPython implementation')
+
+
 def refusing_port():
     """Returns a local port number that will refuse all connections.
 
@@ -50,3 +61,18 @@ def refusing_port():
     conn.close()
     server_socket.close()
     return (client_socket.close, client_addr[1])
+
+
+def exec_test(caller_globals, caller_locals, s):
+    """Execute ``s`` in a given context and return the result namespace.
+
+    Used to define functions for tests in particular python
+    versions that would be syntax errors in older versions.
+    """
+    # Flatten the real global and local namespace into our fake
+    # globals: it's all global from the perspective of code defined
+    # in s.
+    global_namespace = dict(caller_globals, **caller_locals)
+    local_namespace = {}
+    exec(textwrap.dedent(s), global_namespace, local_namespace)
+    return local_namespace
diff --git a/lib/tornado/test/web_test.py b/lib/tornado/test/web_test.py
index 9374c4824b..36312f975a 100644
--- a/lib/tornado/test/web_test.py
+++ b/lib/tornado/test/web_test.py
@@ -3,20 +3,24 @@
 from tornado import gen
 from tornado.escape import json_decode, utf8, to_unicode, recursive_unicode, native_str, to_basestring
 from tornado.httputil import format_timestamp
+from tornado.ioloop import IOLoop
 from tornado.iostream import IOStream
 from tornado import locale
 from tornado.log import app_log, gen_log
 from tornado.simple_httpclient import SimpleAsyncHTTPClient
 from tornado.template import DictLoader
-from tornado.testing import AsyncHTTPTestCase, ExpectLog, gen_test
-from tornado.test.util import unittest
+from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, ExpectLog, gen_test
+from tornado.test.util import unittest, skipBefore35, exec_test
 from tornado.util import u, ObjectDict, unicode_type, timedelta_to_seconds
-from tornado.web import RequestHandler, authenticated, Application, asynchronous, url, HTTPError, StaticFileHandler, _create_signature_v1, create_signed_value, decode_signed_value, ErrorHandler, UIModule, MissingArgumentError, stream_request_body, Finish, removeslash, addslash, RedirectHandler as WebRedirectHandler, get_signature_key_version
+from tornado.web import RequestHandler, authenticated, Application, asynchronous, url, HTTPError, StaticFileHandler, _create_signature_v1, create_signed_value, decode_signed_value, ErrorHandler, UIModule, MissingArgumentError, stream_request_body, Finish, removeslash, addslash, RedirectHandler as WebRedirectHandler, get_signature_key_version, GZipContentEncoding
 
 import binascii
 import contextlib
+import copy
 import datetime
 import email.utils
+import gzip
+from io import BytesIO
 import itertools
 import logging
 import os
@@ -570,8 +574,8 @@ def get(self):
 
 
 class EmptyFlushCallbackHandler(RequestHandler):
-    @gen.engine
     @asynchronous
+    @gen.engine
     def get(self):
         # Ensure that the flush callback is run whether or not there
         # was any output.  The gen.Task and direct yield forms are
@@ -967,7 +971,8 @@ def get(self, path):
 
         return [('/static_url/(.*)', StaticUrlHandler),
                 ('/abs_static_url/(.*)', AbsoluteStaticUrlHandler),
-                ('/override_static_url/(.*)', OverrideStaticUrlHandler)]
+                ('/override_static_url/(.*)', OverrideStaticUrlHandler),
+                ('/root_static/(.*)', StaticFileHandler, dict(path='/'))]
 
     def get_app_kwargs(self):
         return dict(static_path=relpath('static'))
@@ -978,6 +983,19 @@ def test_static_files(self):
 
         response = self.fetch('/static/robots.txt')
         self.assertTrue(b"Disallow: /" in response.body)
+        self.assertEqual(response.headers.get("Content-Type"), "text/plain")
+
+    def test_static_compressed_files(self):
+        response = self.fetch("/static/sample.xml.gz")
+        self.assertEqual(response.headers.get("Content-Type"),
+                         "application/gzip")
+        response = self.fetch("/static/sample.xml.bz2")
+        self.assertEqual(response.headers.get("Content-Type"),
+                         "application/octet-stream")
+        # make sure the uncompressed file still has the correct type
+        response = self.fetch("/static/sample.xml")
+        self.assertTrue(response.headers.get("Content-Type")
+                        in set(("text/xml", "application/xml")))
 
     def test_static_url(self):
         response = self.fetch("/static_url/robots.txt")
@@ -1182,6 +1200,10 @@ def test_static_404(self):
         self.assertEqual(response.code, 404)
 
     def test_path_traversal_protection(self):
+        # curl_httpclient processes ".." on the client side, so we
+        # must test this with simple_httpclient.
+        self.http_client.close()
+        self.http_client = SimpleAsyncHTTPClient()
         with ExpectLog(gen_log, ".*not in root static directory"):
             response = self.get_and_head('/static/../static_foo.txt')
         # Attempted path traversal should result in 403, not 200
@@ -1190,6 +1212,17 @@ def test_path_traversal_protection(self):
         # is probably a packaging error).
         self.assertEqual(response.code, 403)
 
+    @unittest.skipIf(os.name != 'posix', 'non-posix OS')
+    def test_root_static_path(self):
+        # Sometimes people set the StaticFileHandler's path to '/'
+        # to disable Tornado's path validation (in conjunction with
+        # their own validation in get_absolute_path). Make sure
+        # that the stricter validation in 4.2.1 doesn't break them.
+        path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+                            'static/robots.txt')
+        response = self.get_and_head('/root_static' + urllib_parse.quote(path))
+        self.assertEqual(response.code, 200)
+
 
 @wsgi_safe
 class StaticDefaultFilenameTest(WebTestCase):
@@ -1470,7 +1503,8 @@ class Handler(RequestHandler):
         def get(self):
             if self.get_argument('vary', None):
                 self.set_header('Vary', self.get_argument('vary'))
-            self.write('hello world')
+            # Must write at least MIN_LENGTH bytes to activate compression.
+            self.write('hello world' + ('!' * GZipContentEncoding.MIN_LENGTH))
 
     def get_app_kwargs(self):
         return dict(
@@ -1547,8 +1581,11 @@ def get(self):
     def test_clear_all_cookies(self):
         response = self.fetch('/', headers={'Cookie': 'foo=bar; baz=xyzzy'})
         set_cookies = sorted(response.headers.get_list('Set-Cookie'))
-        self.assertTrue(set_cookies[0].startswith('baz=;'))
-        self.assertTrue(set_cookies[1].startswith('foo=;'))
+        # Python 3.5 sends 'baz="";'; older versions use 'baz=;'
+        self.assertTrue(set_cookies[0].startswith('baz=;') or
+                        set_cookies[0].startswith('baz="";'))
+        self.assertTrue(set_cookies[1].startswith('foo=;') or
+                        set_cookies[1].startswith('foo="";'))
 
 
 class PermissionError(Exception):
@@ -1609,10 +1646,10 @@ def test_known_error(self):
 class BuggyLoggingTest(SimpleHandlerTestCase):
     class Handler(RequestHandler):
         def get(self):
-            1/0
+            1 / 0
 
         def log_exception(self, typ, value, tb):
-            1/0
+            1 / 0
 
     def test_buggy_log_exception(self):
         # Something gets logged even though the application's
@@ -2064,59 +2101,55 @@ def test_close_during_upload(self):
         yield self.close_future
 
 
-class StreamingRequestFlowControlTest(WebTestCase):
-    def get_handlers(self):
-        from tornado.ioloop import IOLoop
-
-        # Each method in this handler returns a Future and yields to the
-        # IOLoop so the future is not immediately ready.  Ensure that the
-        # Futures are respected and no method is called before the previous
-        # one has completed.
-        @stream_request_body
-        class FlowControlHandler(RequestHandler):
-            def initialize(self, test):
-                self.test = test
-                self.method = None
-                self.methods = []
-
-            @contextlib.contextmanager
-            def in_method(self, method):
-                if self.method is not None:
-                    self.test.fail("entered method %s while in %s" %
-                                   (method, self.method))
-                self.method = method
-                self.methods.append(method)
-                try:
-                    yield
-                finally:
-                    self.method = None
-
-            @gen.coroutine
-            def prepare(self):
-                # Note that asynchronous prepare() does not block data_received,
-                # so we don't use in_method here.
-                self.methods.append('prepare')
-                yield gen.Task(IOLoop.current().add_callback)
+# Each method in this handler returns a yieldable object and yields to the
+# IOLoop so the future is not immediately ready.  Ensure that the
+# yieldables are respected and no method is called before the previous
+# one has completed.
+@stream_request_body
+class BaseFlowControlHandler(RequestHandler):
+    def initialize(self, test):
+        self.test = test
+        self.method = None
+        self.methods = []
+
+    @contextlib.contextmanager
+    def in_method(self, method):
+        if self.method is not None:
+            self.test.fail("entered method %s while in %s" %
+                           (method, self.method))
+        self.method = method
+        self.methods.append(method)
+        try:
+            yield
+        finally:
+            self.method = None
 
-            @gen.coroutine
-            def data_received(self, data):
-                with self.in_method('data_received'):
-                    yield gen.Task(IOLoop.current().add_callback)
+    @gen.coroutine
+    def prepare(self):
+        # Note that asynchronous prepare() does not block data_received,
+        # so we don't use in_method here.
+        self.methods.append('prepare')
+        yield gen.Task(IOLoop.current().add_callback)
 
-            @gen.coroutine
-            def post(self):
-                with self.in_method('post'):
-                    yield gen.Task(IOLoop.current().add_callback)
-                self.write(dict(methods=self.methods))
+    @gen.coroutine
+    def post(self):
+        with self.in_method('post'):
+            yield gen.Task(IOLoop.current().add_callback)
+        self.write(dict(methods=self.methods))
 
-        return [('/', FlowControlHandler, dict(test=self))]
 
+class BaseStreamingRequestFlowControlTest(object):
     def get_httpserver_options(self):
         # Use a small chunk size so flow control is relevant even though
         # all the data arrives at once.
-        return dict(chunk_size=10)
+        return dict(chunk_size=10, decompress_request=True)
 
-    def test_flow_control(self):
+    def get_http_client(self):
+        # simple_httpclient only: curl doesn't support body_producer.
+        return SimpleAsyncHTTPClient(io_loop=self.io_loop)
+
+    # Test all the slightly different code paths for fixed, chunked, etc bodies.
+    def test_flow_control_fixed_body(self):
         response = self.fetch('/', body='abcdefghijklmnopqrstuvwxyz',
                               method='POST')
         response.rethrow()
@@ -2125,6 +2158,58 @@ def test_flow_control(self):
                                        'data_received', 'data_received',
                                        'post']))
 
+    def test_flow_control_chunked_body(self):
+        chunks = [b'abcd', b'efgh', b'ijkl']
+        @gen.coroutine
+        def body_producer(write):
+            for i in chunks:
+                yield write(i)
+        response = self.fetch('/', body_producer=body_producer, method='POST')
+        response.rethrow()
+        self.assertEqual(json_decode(response.body),
+                         dict(methods=['prepare', 'data_received',
+                                       'data_received', 'data_received',
+                                       'post']))
+
+    def test_flow_control_compressed_body(self):
+        bytesio = BytesIO()
+        gzip_file = gzip.GzipFile(mode='w', fileobj=bytesio)
+        gzip_file.write(b'abcdefghijklmnopqrstuvwxyz')
+        gzip_file.close()
+        compressed_body = bytesio.getvalue()
+        response = self.fetch('/', body=compressed_body, method='POST',
+                              headers={'Content-Encoding': 'gzip'})
+        response.rethrow()
+        self.assertEqual(json_decode(response.body),
+                         dict(methods=['prepare', 'data_received',
+                                       'data_received', 'data_received',
+                                       'post']))
+
+class DecoratedStreamingRequestFlowControlTest(
+        BaseStreamingRequestFlowControlTest,
+        WebTestCase):
+    def get_handlers(self):
+        class DecoratedFlowControlHandler(BaseFlowControlHandler):
+            @gen.coroutine
+            def data_received(self, data):
+                with self.in_method('data_received'):
+                    yield gen.Task(IOLoop.current().add_callback)
+        return [('/', DecoratedFlowControlHandler, dict(test=self))]
+
+
+@skipBefore35
+class NativeStreamingRequestFlowControlTest(
+        BaseStreamingRequestFlowControlTest,
+        WebTestCase):
+    def get_handlers(self):
+        class NativeFlowControlHandler(BaseFlowControlHandler):
+            data_received = exec_test(globals(), locals(), """
+            async def data_received(self, data):
+                with self.in_method('data_received'):
+                    await gen.Task(IOLoop.current().add_callback)
+            """)["data_received"]
+        return [('/', NativeFlowControlHandler, dict(test=self))]
+
 
 @wsgi_safe
 class IncorrectContentLengthTest(SimpleHandlerTestCase):
@@ -2510,21 +2595,40 @@ def test_versioning(self):
             self.assertEqual(response.code, 200)
 
 
+@wsgi_safe
+class XSRFCookieKwargsTest(SimpleHandlerTestCase):
+    class Handler(RequestHandler):
+        def get(self):
+            self.write(self.xsrf_token)
+
+    def get_app_kwargs(self):
+        return dict(xsrf_cookies=True,
+                    xsrf_cookie_kwargs=dict(httponly=True))
+
+    def test_xsrf_httponly(self):
+        response = self.fetch("/")
+        self.assertIn('httponly;', response.headers['Set-Cookie'].lower())
+
+
 @wsgi_safe
 class FinishExceptionTest(SimpleHandlerTestCase):
     class Handler(RequestHandler):
         def get(self):
             self.set_status(401)
             self.set_header('WWW-Authenticate', 'Basic realm="something"')
-            self.write('authentication required')
-            raise Finish()
+            if self.get_argument('finish_value', ''):
+                raise Finish('authentication required')
+            else:
+                self.write('authentication required')
+                raise Finish()
 
     def test_finish_exception(self):
-        response = self.fetch('/')
-        self.assertEqual(response.code, 401)
-        self.assertEqual('Basic realm="something"',
-                         response.headers.get('WWW-Authenticate'))
-        self.assertEqual(b'authentication required', response.body)
+        for url in ['/', '/?finish_value=1']:
+            response = self.fetch(url)
+            self.assertEqual(response.code, 401)
+            self.assertEqual('Basic realm="something"',
+                             response.headers.get('WWW-Authenticate'))
+            self.assertEqual(b'authentication required', response.body)
 
 
 @wsgi_safe
@@ -2643,3 +2747,19 @@ def get(self):
     def test_missing_remote_ip(self):
         resp = self.fetch("/")
         self.assertEqual(resp.body, b"GET / (None)")
+
+
+class HTTPErrorTest(unittest.TestCase):
+    def test_copy(self):
+        e = HTTPError(403, reason="Go away")
+        e2 = copy.copy(e)
+        self.assertIsNot(e, e2)
+        self.assertEqual(e.status_code, e2.status_code)
+        self.assertEqual(e.reason, e2.reason)
+
+
+class ApplicationTest(AsyncTestCase):
+    def test_listen(self):
+        app = Application([])
+        server = app.listen(0, address='127.0.0.1')
+        server.stop()
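
A minimal sketch of the ``xsrf_cookie_kwargs`` setting covered by the new XSRFCookieKwargsTest; the handler is illustrative::

    from tornado.web import Application, RequestHandler

    class TokenHandler(RequestHandler):
        def get(self):
            self.write(self.xsrf_token)

    # Extra keyword arguments are passed through to set_cookie() when the
    # XSRF cookie is written, e.g. to mark it HttpOnly.
    app = Application([('/', TokenHandler)],
                      xsrf_cookies=True,
                      xsrf_cookie_kwargs=dict(httponly=True))
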
diff --git a/lib/tornado/test/websocket_test.py b/lib/tornado/test/websocket_test.py
index 23a4324ce6..7b47214df7 100644
--- a/lib/tornado/test/websocket_test.py
+++ b/lib/tornado/test/websocket_test.py
@@ -130,7 +130,7 @@ def test_http_request(self):
     @gen_test
     def test_websocket_gen(self):
         ws = yield self.ws_connect('/echo')
-        ws.write_message('hello')
+        yield ws.write_message('hello')
         response = yield ws.read_message()
         self.assertEqual(response, 'hello')
         yield self.close(ws)
diff --git a/lib/tornado/testing.py b/lib/tornado/testing.py
index 93f0dbe141..54d76fe40f 100644
--- a/lib/tornado/testing.py
+++ b/lib/tornado/testing.py
@@ -34,19 +34,30 @@
 from tornado.stack_context import ExceptionStackContext
 from tornado.util import raise_exc_info, basestring_type
 import functools
+import inspect
 import logging
 import os
 import re
 import signal
 import socket
 import sys
-import types
 
 try:
     from cStringIO import StringIO  # py2
 except ImportError:
     from io import StringIO  # py3
 
+try:
+    from collections.abc import Generator as GeneratorType  # py35+
+except ImportError:
+    from types import GeneratorType
+
+if sys.version_info >= (3, 5):
+    iscoroutine = inspect.iscoroutine
+    iscoroutinefunction = inspect.iscoroutinefunction
+else:
+    iscoroutine = iscoroutinefunction = lambda f: False
+
 # Tornado's own test suite requires the updated unittest module
 # (either py27+ or unittest2) so tornado.test.util enforces
 # this requirement, but for other users of tornado.testing we want
@@ -81,12 +92,13 @@ def get_unused_port():
     return port
 
 
-def bind_unused_port():
+def bind_unused_port(reuse_port=False):
     """Binds a server socket to an available port on localhost.
 
     Returns a tuple (socket, port).
     """
-    [sock] = netutil.bind_sockets(None, 'localhost', family=socket.AF_INET)
+    [sock] = netutil.bind_sockets(None, 'localhost', family=socket.AF_INET,
+                                  reuse_port=reuse_port)
     port = sock.getsockname()[1]
     return sock, port
 
@@ -118,9 +130,9 @@ def __init__(self, orig_method):
 
     def __call__(self, *args, **kwargs):
         result = self.orig_method(*args, **kwargs)
-        if isinstance(result, types.GeneratorType):
-            raise TypeError("Generator test methods should be decorated with "
-                            "tornado.testing.gen_test")
+        if isinstance(result, GeneratorType) or iscoroutine(result):
+            raise TypeError("Generator and coroutine test methods should be"
+                            " decorated with tornado.testing.gen_test")
         elif result is not None:
             raise ValueError("Return value from test method ignored: %r" %
                              result)
@@ -331,20 +343,29 @@ class AsyncHTTPTestCase(AsyncTestCase):
     Tests will typically use the provided ``self.http_client`` to fetch
     URLs from this server.
 
-    Example::
+    Example, assuming the "Hello, world" example from the user guide is in
+    ``hello.py``::
+
+        import hello
 
-        class MyHTTPTest(AsyncHTTPTestCase):
+        class TestHelloApp(AsyncHTTPTestCase):
             def get_app(self):
-                return Application([('/', MyHandler)...])
+                return hello.make_app()
 
             def test_homepage(self):
-                # The following two lines are equivalent to
-                #   response = self.fetch('/')
-                # but are shown in full here to demonstrate explicit use
-                # of self.stop and self.wait.
-                self.http_client.fetch(self.get_url('/'), self.stop)
-                response = self.wait()
-                # test contents of response
+                response = self.fetch('/')
+                self.assertEqual(response.code, 200)
+                self.assertEqual(response.body, 'Hello, world')
+
+    That call to ``self.fetch()`` is equivalent to ::
+
+        self.http_client.fetch(self.get_url('/'), self.stop)
+        response = self.wait()
+
+    which illustrates how AsyncTestCase can turn an asynchronous operation,
+    like ``http_client.fetch()``, into a synchronous operation. If you need
+    to do other asynchronous operations in tests, you'll probably need to use
+    ``stop()`` and ``wait()`` yourself.
     """
     def setUp(self):
         super(AsyncHTTPTestCase, self).setUp()
@@ -485,13 +506,16 @@ def wrap(f):
         @functools.wraps(f)
         def pre_coroutine(self, *args, **kwargs):
             result = f(self, *args, **kwargs)
-            if isinstance(result, types.GeneratorType):
+            if isinstance(result, GeneratorType) or iscoroutine(result):
                 self._test_generator = result
             else:
                 self._test_generator = None
             return result
 
-        coro = gen.coroutine(pre_coroutine)
+        if iscoroutinefunction(f):
+            coro = pre_coroutine
+        else:
+            coro = gen.coroutine(pre_coroutine)
 
         @functools.wraps(coro)
         def post_coroutine(self, *args, **kwargs):
@@ -501,8 +525,8 @@ def post_coroutine(self, *args, **kwargs):
                     timeout=timeout)
             except TimeoutError as e:
                 # run_sync raises an error with an unhelpful traceback.
-                # If we throw it back into the generator the stack trace
-                # will be replaced by the point where the test is stopped.
+                # Throw it back into the generator or coroutine so the stack
+                # trace is replaced by the point where the test is stopped.
                 self._test_generator.throw(e)
                 # In case the test contains an overly broad except clause,
                 # we may get back here.  In this case re-raise the original
@@ -575,10 +599,16 @@ class ExpectLog(logging.Filter):
     Useful to make tests of error conditions less noisy, while still
     leaving unexpected log entries visible.  *Not thread safe.*
 
+    The attribute ``logged_stack`` is set to true if any exception
+    stack trace was logged.
+
     Usage::
 
         with ExpectLog('tornado.application', "Uncaught exception"):
             error_response = self.fetch("/some_page")
+
+    .. versionchanged:: 4.3
+       Added the ``logged_stack`` attribute.
     """
     def __init__(self, logger, regex, required=True):
         """Constructs an ExpectLog context manager.
@@ -596,8 +626,11 @@ def __init__(self, logger, regex, required=True):
         self.regex = re.compile(regex)
         self.required = required
         self.matched = False
+        self.logged_stack = False
 
     def filter(self, record):
+        if record.exc_info:
+            self.logged_stack = True
         message = record.getMessage()
         if self.regex.match(message):
             self.matched = True
@@ -606,6 +639,7 @@ def filter(self, record):
 
     def __enter__(self):
         self.logger.addFilter(self)
+        return self
 
     def __exit__(self, typ, value, tb):
         self.logger.removeFilter(self)
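
A short sketch of the updated ExpectLog behaviour: ``__enter__`` now returns the instance, and ``logged_stack`` records whether any exception traceback was logged inside the block::

    from tornado.log import app_log
    from tornado.testing import ExpectLog

    with ExpectLog(app_log, 'something failed', required=False) as expect_log:
        app_log.error('something failed')

    print(expect_log.matched)       # True: the message matched the regex
    print(expect_log.logged_stack)  # False: no exc_info was attached
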
diff --git a/lib/tornado/util.py b/lib/tornado/util.py
index 606ced1973..a67ddf50db 100644
--- a/lib/tornado/util.py
+++ b/lib/tornado/util.py
@@ -13,7 +13,6 @@
 from __future__ import absolute_import, division, print_function, with_statement
 
 import array
-import inspect
 import os
 import sys
 import zlib
@@ -24,6 +23,13 @@
 except NameError:
     xrange = range  # py3
 
+# inspect.getargspec() raises DeprecationWarnings in Python 3.5.
+# The two functions have compatible interfaces for the parts we need.
+try:
+    from inspect import getfullargspec as getargspec  # py3
+except ImportError:
+    from inspect import getargspec  # py2
+
 
 class ObjectDict(dict):
     """Makes a dictionary behave like an object, with attribute-style access.
@@ -284,11 +290,26 @@ class ArgReplacer(object):
     def __init__(self, func, name):
         self.name = name
         try:
-            self.arg_pos = inspect.getargspec(func).args.index(self.name)
+            self.arg_pos = self._getargnames(func).index(name)
         except ValueError:
             # Not a positional parameter
             self.arg_pos = None
 
+    def _getargnames(self, func):
+        try:
+            return getargspec(func).args
+        except TypeError:
+            if hasattr(func, 'func_code'):
+                # Cython-generated code has all the attributes needed
+                # by inspect.getargspec, but the inspect module only
+                # works with ordinary functions. Inline the portion of
+                # getargspec that we need here. Note that for static
+                # functions the @cython.binding(True) decorator must
+                # be used (for methods it works out of the box).
+                code = func.func_code
+                return code.co_varnames[:code.co_argcount]
+            raise
+
     def get_old_value(self, args, kwargs, default=None):
         """Returns the old value of the named argument without replacing it.
 
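A small standalone sketch of the compatibility import this hunk introduces; both spellings expose the ``.args`` list, which is the only part ``ArgReplacer`` relies on::

    try:
        from inspect import getfullargspec as getargspec  # Python 3
    except ImportError:
        from inspect import getargspec  # Python 2

    def greet(name, punctuation="!"):
        return "Hello, " + name + punctuation

    # ['name', 'punctuation'] on both Python 2 and 3, without DeprecationWarnings.
    print(getargspec(greet).args)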
diff --git a/lib/tornado/web.py b/lib/tornado/web.py
index 9847bb02e9..1cacd7c0a8 100644
--- a/lib/tornado/web.py
+++ b/lib/tornado/web.py
@@ -56,9 +56,7 @@ def get(self):
 
 """
 
-from __future__ import (absolute_import, division,
-                        print_function, with_statement)
-
+from __future__ import absolute_import, division, print_function, with_statement
 
 import base64
 import binascii
@@ -81,7 +79,7 @@ def get(self):
 import types
 from io import BytesIO
 
-from tornado.concurrent import Future, is_future
+from tornado.concurrent import Future
 from tornado import escape
 from tornado import gen
 from tornado import httputil
@@ -362,10 +360,8 @@ def _convert_header_value(self, value):
         else:
             raise TypeError("Unsupported header value %r" % value)
         # If \n is allowed into the header, it is possible to inject
-        # additional headers or split the request. Also cap length to
-        # prevent obviously erroneous values.
-        if (len(value) > 4000 or
-                RequestHandler._INVALID_HEADER_CHAR_RE.search(value)):
+        # additional headers or split the request.
+        if RequestHandler._INVALID_HEADER_CHAR_RE.search(value):
             raise ValueError("Unsafe header value %r", value)
         return value
 
@@ -651,7 +647,6 @@ def get_secure_cookie_key_version(self, name, value=None):
             value = self.get_cookie(name)
         return get_signature_key_version(value)
 
-
     def redirect(self, url, permanent=False, status=None):
         """Sends a redirect to the given (optionally relative) URL.
 
@@ -838,8 +833,9 @@ def create_template_loader(self, template_path):
 
         May be overridden by subclasses.  By default returns a
         directory-based loader on the given path, using the
-        ``autoescape`` application setting.  If a ``template_loader``
-        application setting is supplied, uses that instead.
+        ``autoescape`` and ``template_whitespace`` application
+        settings.  If a ``template_loader`` application setting is
+        supplied, uses that instead.
         """
         settings = self.application.settings
         if "template_loader" in settings:
@@ -849,6 +845,8 @@ def create_template_loader(self, template_path):
             # autoescape=None means "no escaping", so we have to be sure
             # to only pass this kwarg if the user asked for it.
             kwargs["autoescape"] = settings["autoescape"]
+        if "template_whitespace" in settings:
+            kwargs["whitespace"] = settings["template_whitespace"]
         return template.Loader(template_path, **kwargs)
 
     def flush(self, include_footers=False, callback=None):
@@ -1065,12 +1063,33 @@ def get_browser_locale(self, default="en_US"):
     def current_user(self):
         """The authenticated user for this request.
 
-        This is a cached version of `get_current_user`, which you can
-        override to set the user based on, e.g., a cookie. If that
-        method is not overridden, this method always returns None.
+        This is set in one of two ways:
+
+        * A subclass may override `get_current_user()`, which will be called
+          automatically the first time ``self.current_user`` is accessed.
+          `get_current_user()` will only be called once per request,
+          and is cached for future access::
+
+              def get_current_user(self):
+                  user_cookie = self.get_secure_cookie("user")
+                  if user_cookie:
+                      return json.loads(user_cookie)

+                  return None
+
+        * It may be set as a normal variable, typically from an overridden
+          `prepare()`::
+
+              @gen.coroutine
+              def prepare(self):
+                  user_id_cookie = self.get_secure_cookie("user_id")
+                  if user_id_cookie:
+                      self.current_user = yield load_user(user_id_cookie)
 
-        We lazy-load the current user the first time this method is called
-        and cache the result after that.
+        Note that `prepare()` may be a coroutine while `get_current_user()`
+        may not, so the latter form is necessary if loading the user requires
+        asynchronous operations.
+
+        The user object may be any type of the application's choosing.
         """
         if not hasattr(self, "_current_user"):
             self._current_user = self.get_current_user()
@@ -1081,7 +1100,10 @@ def current_user(self, value):
         self._current_user = value
 
     def get_current_user(self):
-        """Override to determine the current user from, e.g., a cookie."""
+        """Override to determine the current user from, e.g., a cookie.
+
+        This method may not be a coroutine.
+        """
         return None
 
     def get_login_url(self):
@@ -1119,10 +1141,19 @@ def xsrf_token(self):
            cookies will be converted to version 2 when this method is called
            unless the ``xsrf_cookie_version`` `Application` setting is
            set to 1.
+
+        .. versionchanged:: 4.3
+           The ``xsrf_cookie_kwargs`` `Application` setting may be
+           used to supply additional cookie options (which will be
+           passed directly to `set_cookie`). For example,
+           ``xsrf_cookie_kwargs=dict(httponly=True, secure=True)``
+           will set the ``secure`` and ``httponly`` flags on the
+           ``_xsrf`` cookie.
         """
         if not hasattr(self, "_xsrf_token"):
             version, token, timestamp = self._get_raw_xsrf_token()
             output_version = self.settings.get("xsrf_cookie_version", 2)
+            cookie_kwargs = self.settings.get("xsrf_cookie_kwargs", {})
             if output_version == 1:
                 self._xsrf_token = binascii.b2a_hex(token)
             elif output_version == 2:
@@ -1138,7 +1169,8 @@ def xsrf_token(self):
             if version is None:
                 expires_days = 30 if self.current_user else None
                 self.set_cookie("_xsrf", self._xsrf_token,
-                                expires_days=expires_days)
+                                expires_days=expires_days,
+                                **cookie_kwargs)
         return self._xsrf_token
 
     def _get_raw_xsrf_token(self):
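A brief sketch of supplying the new ``xsrf_cookie_kwargs`` setting when building an application (handler list and secret are placeholders)::

    from tornado.web import Application

    app = Application(
        handlers=[],                 # your handlers here
        cookie_secret="change-me",
        xsrf_cookies=True,
        # Passed straight through to set_cookie() for the _xsrf cookie.
        xsrf_cookie_kwargs=dict(httponly=True, secure=True),
    )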
@@ -1388,10 +1420,8 @@ def _execute(self, transforms, *args, **kwargs):
                 self.check_xsrf_cookie()
 
             result = self.prepare()
-            if is_future(result):
-                result = yield result
             if result is not None:
-                raise TypeError("Expected None, got %r" % result)
+                result = yield result
             if self._prepared_future is not None:
                 # Tell the Application we've finished with prepare()
                 # and are ready for the body to arrive.
@@ -1411,10 +1441,8 @@ def _execute(self, transforms, *args, **kwargs):
 
             method = getattr(self, self.request.method.lower())
             result = method(*self.path_args, **self.path_kwargs)
-            if is_future(result):
-                result = yield result
             if result is not None:
-                raise TypeError("Expected None, got %r" % result)
+                result = yield result
             if self._auto_finish and not self._finished:
                 self.finish()
         except Exception as e:
@@ -1453,7 +1481,7 @@ def _handle_request_exception(self, e):
         if isinstance(e, Finish):
             # Not an error; just finish the request without logging.
             if not self._finished:
-                self.finish()
+                self.finish(*e.args)
             return
         try:
             self.log_exception(*sys.exc_info())
@@ -1557,9 +1585,12 @@ def _on_download(self, response):
     .. testoutput::
        :hide:
 
-    .. versionadded:: 3.1
+    .. versionchanged:: 3.1
        The ability to use ``@gen.coroutine`` without ``@asynchronous``.
 
+    .. versionchanged:: 4.3 Returning anything but ``None`` or a
+       yieldable object from a method decorated with ``@asynchronous``
+       is an error. Such return values were previously ignored silently.
     """
     # Delay the IOLoop import because it's not available on app engine.
     from tornado.ioloop import IOLoop
@@ -1570,7 +1601,8 @@ def wrapper(self, *args, **kwargs):
         with stack_context.ExceptionStackContext(
                 self._stack_context_handle_exception):
             result = method(self, *args, **kwargs)
-            if is_future(result):
+            if result is not None:
+                result = gen.convert_yielded(result)
                 # If @asynchronous is used with @gen.coroutine, (but
                 # not @gen.engine), we can automatically finish the
                 # request when the future resolves.  Additionally,
@@ -1691,7 +1723,7 @@ class Application(httputil.HTTPServerConnectionDelegate):
     (fully-qualified) name.
 
     Each tuple can contain additional elements, which correspond to the
-    arguments to the `URLSpec` constructor.  (Prior to Tornado 3.2, this
+    arguments to the `URLSpec` constructor.  (Prior to Tornado 3.2,
     only tuples of two or three elements were allowed).
 
     A dictionary may be passed as the third element of the tuple,
@@ -1780,12 +1812,18 @@ def listen(self, port, address="", **kwargs):
 
         Note that after calling this method you still need to call
         ``IOLoop.current().start()`` to start the server.
+
+        Returns the `.HTTPServer` object.
+
+        .. versionchanged:: 4.3
+           Now returns the `.HTTPServer` object.
         """
         # import is here rather than top level because HTTPServer
         # is not importable on appengine
         from tornado.httpserver import HTTPServer
         server = HTTPServer(self, **kwargs)
         server.listen(port, address)
+        return server
 
     def add_handlers(self, host_pattern, host_handlers):
         """Appends the given handlers to our handler list.
@@ -2013,8 +2051,8 @@ def execute(self):
         # except handler, and we cannot easily access the IOLoop here to
         # call add_future (because of the requirement to remain compatible
         # with WSGI)
-        f = self.handler._execute(transforms, *self.path_args,
-                                  **self.path_kwargs)
+        self.handler._execute(transforms, *self.path_args,
+                              **self.path_kwargs)
         # If we are streaming the request body, then execute() is finished
         # when the handler has prepared to receive the body.  If not,
         # it doesn't matter when execute() finishes (so we return None)
@@ -2043,7 +2081,7 @@ class HTTPError(Exception):
         determined automatically from ``status_code``, but can be used
         to use a non-standard numeric code.
     """
-    def __init__(self, status_code, log_message=None, *args, **kwargs):
+    def __init__(self, status_code=500, log_message=None, *args, **kwargs):
         self.status_code = status_code
         self.log_message = log_message
         self.args = args
@@ -2064,10 +2102,14 @@ def __str__(self):
 class Finish(Exception):
     """An exception that ends the request without producing an error response.
 
-    When `Finish` is raised in a `RequestHandler`, the request will end
-    (calling `RequestHandler.finish` if it hasn't already been called),
-    but the outgoing response will not be modified and the error-handling
-    methods (including `RequestHandler.write_error`) will not be called.
+    When `Finish` is raised in a `RequestHandler`, the request will
+    end (calling `RequestHandler.finish` if it hasn't already been
+    called), but the error-handling methods (including
+    `RequestHandler.write_error`) will not be called.
+
+    If `Finish()` was created with no arguments, the pending response
+    will be sent as-is. If `Finish()` was given an argument, that
+    argument will be passed to `RequestHandler.finish()`.
 
     This can be a more convenient way to implement custom error pages
     than overriding ``write_error`` (especially in library code)::
@@ -2076,6 +2118,10 @@ class Finish(Exception):
             self.set_status(401)
             self.set_header('WWW-Authenticate', 'Basic realm="something"')
             raise Finish()
+
+    .. versionchanged:: 4.3
+       Arguments passed to ``Finish()`` will be passed on to
+       `RequestHandler.finish`.
     """
     pass
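A short sketch of the new behaviour: an argument given to ``Finish`` is forwarded to ``RequestHandler.finish()`` and becomes the response body (handler name hypothetical)::

    from tornado.web import RequestHandler, Finish

    class LookupHandler(RequestHandler):
        def get(self):
            self.set_status(404)
            # The dict is passed to finish(), which serialises it as JSON.
            raise Finish({"error": "not found"})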
 
@@ -2148,6 +2194,11 @@ class StaticFileHandler(RequestHandler):
     the ``path`` argument to the get() method (different than the constructor
     argument above); see `URLSpec` for details.
 
+    To serve a file like ``index.html`` automatically when a directory is
+    requested, set ``static_handler_args=dict(default_filename="index.html")``
+    in your application settings, or add ``default_filename`` as an initializer
+    argument for your ``StaticFileHandler``.
+
     To maximize the effectiveness of browser caching, this class supports
     versioned urls (by default using the argument ``?v=``).  If a version
     is given, we instruct the browser to cache this file indefinitely.
@@ -2159,8 +2210,7 @@ class StaticFileHandler(RequestHandler):
     a dedicated static file server (such as nginx or Apache).  We support
     the HTTP ``Accept-Ranges`` mechanism to return partial content (because
     some browsers require this functionality to be present to seek in
-    HTML5 audio or video), but this handler should not be used with
-    files that are too large to fit comfortably in memory.
+    HTML5 audio or video).
 
     **Subclassing notes**
 
@@ -2380,7 +2430,14 @@ def validate_absolute_path(self, root, absolute_path):
         # We must add it back to `root` so that we only match files
         # in a directory named `root` instead of files starting with
         # that prefix.
-        root = os.path.abspath(root) + os.path.sep
+        root = os.path.abspath(root)
+        if not root.endswith(os.path.sep):
+            # abspath always removes a trailing slash, except when
+            # root is '/'. This is an unusual case, but several projects
+            # have independently discovered this technique to disable
+            # Tornado's path validation and (hopefully) do their own,
+            # so we need to support it.
+            root += os.path.sep
         # The trailing slash also needs to be temporarily added back
         # the requested path so a request to root/ will match.
         if not (absolute_path + os.path.sep).startswith(root):
@@ -2494,7 +2551,19 @@ def get_content_type(self):
         .. versionadded:: 3.1
         """
         mime_type, encoding = mimetypes.guess_type(self.absolute_path)
-        return mime_type
+        # per RFC 6713, use the appropriate type for a gzip compressed file
+        if encoding == "gzip":
+            return "application/gzip"
+        # As of 2015-07-21 there is no bzip2 encoding defined at
+        # http://www.iana.org/assignments/media-types/media-types.xhtml
+        # So for that (and any other encoding), use octet-stream.
+        elif encoding is not None:
+            return "application/octet-stream"
+        elif mime_type is not None:
+            return mime_type
+        # if mime_type not detected, use application/octet-stream
+        else:
+            return "application/octet-stream"
 
     def set_extra_headers(self, path):
         """For subclass to add extra headers to the response"""
@@ -2645,7 +2714,16 @@ class GZipContentEncoding(OutputTransform):
     CONTENT_TYPES = set(["application/javascript", "application/x-javascript",
                          "application/xml", "application/atom+xml",
                          "application/json", "application/xhtml+xml"])
-    MIN_LENGTH = 5
+    # Python's GzipFile defaults to level 9, while most other gzip
+    # tools (including gzip itself) default to 6, which is probably a
+    # better CPU/size tradeoff.
+    GZIP_LEVEL = 6
+    # Responses that are too short are unlikely to benefit from gzipping
+    # after considering the "Content-Encoding: gzip" header and the header
+    # inside the gzip encoding.
+    # Note that responses written in multiple chunks will be compressed
+    # regardless of size.
+    MIN_LENGTH = 1024
 
     def __init__(self, request):
         self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "")
@@ -2666,7 +2744,8 @@ def transform_first_chunk(self, status_code, headers, chunk, finishing):
         if self._gzipping:
             headers["Content-Encoding"] = "gzip"
             self._gzip_value = BytesIO()
-            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
+            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value,
+                                            compresslevel=self.GZIP_LEVEL)
             chunk = self.transform_chunk(chunk, finishing)
             if "Content-Length" in headers:
                 # The original content length is no longer correct.
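The transform itself is enabled through an application setting; a hedged sketch, assuming the ``compress_response`` setting used by Tornado 4.x::

    from tornado.web import Application

    app = Application(
        [],                      # handlers omitted
        compress_response=True,  # bodies of MIN_LENGTH bytes or more are gzipped at GZIP_LEVEL
    )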
diff --git a/lib/tornado/websocket.py b/lib/tornado/websocket.py
index 2f57b99093..11e526687d 100644
--- a/lib/tornado/websocket.py
+++ b/lib/tornado/websocket.py
@@ -16,8 +16,7 @@
    Removed support for the draft 76 protocol version.
 """
 
-from __future__ import (absolute_import, division,
-                        print_function, with_statement)
+from __future__ import absolute_import, division, print_function, with_statement
 # Author: Jacob Kristhammar, 2010
 
 import base64
@@ -208,12 +207,15 @@ def write_message(self, message, binary=False):
         .. versionchanged:: 3.2
            `WebSocketClosedError` was added (previously a closed connection
            would raise an `AttributeError`)
+
+        .. versionchanged:: 4.3
+           Returns a `.Future` which can be used for flow control.
         """
         if self.ws_connection is None:
             raise WebSocketClosedError()
         if isinstance(message, dict):
             message = tornado.escape.json_encode(message)
-        self.ws_connection.write_message(message, binary=binary)
+        return self.ws_connection.write_message(message, binary=binary)
 
     def select_subprotocol(self, subprotocols):
         """Invoked when a new WebSocket requests specific subprotocols.
@@ -444,7 +446,8 @@ def __init__(self, persistent, max_wbits):
             self._compressor = None
 
     def _create_compressor(self):
-        return zlib.compressobj(-1, zlib.DEFLATED, -self._max_wbits)
+        return zlib.compressobj(tornado.web.GZipContentEncoding.GZIP_LEVEL,
+                                zlib.DEFLATED, -self._max_wbits)
 
     def compress(self, data):
         compressor = self._compressor or self._create_compressor()
@@ -670,7 +673,7 @@ def _write_frame(self, fin, opcode, data, flags=0):
         frame += data
         self._wire_bytes_out += len(frame)
         try:
-            self.stream.write(frame)
+            return self.stream.write(frame)
         except StreamClosedError:
             self._abort()
 
@@ -687,7 +690,7 @@ def write_message(self, message, binary=False):
         if self._compressor:
             message = self._compressor.compress(message)
             flags |= self.RSV1
-        self._write_frame(True, opcode, message, flags=flags)
+        return self._write_frame(True, opcode, message, flags=flags)
 
     def write_ping(self, data):
         """Send ping frame."""
@@ -707,7 +710,7 @@ def _on_frame_start(self, data):
         reserved_bits = header & self.RSV_MASK
         self._frame_opcode = header & self.OPCODE_MASK
         self._frame_opcode_is_control = self._frame_opcode & 0x8
-        if self._decompressor is not None:
+        if self._decompressor is not None and self._frame_opcode != 0:
             self._frame_compressed = bool(reserved_bits & self.RSV1)
             reserved_bits &= ~self.RSV1
         if reserved_bits:
@@ -969,7 +972,7 @@ def headers_received(self, start_line, headers):
 
     def write_message(self, message, binary=False):
         """Sends a message to the WebSocket server."""
-        self.protocol.write_message(message, binary)
+        return self.protocol.write_message(message, binary)
 
     def read_message(self, callback=None):
         """Reads a message from the WebSocket server.
@@ -1023,7 +1026,7 @@ def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
     style, the application typically calls
     `~.WebSocketClientConnection.read_message` in a loop::
 
-        conn = yield websocket_connection(loop)
+        conn = yield websocket_connect(url)
         while True:
             msg = yield conn.read_message()
             if msg is None: break
diff --git a/lib/validators/__init__.py b/lib/validators/__init__.py
index fe855b2eac..27fbac03bf 100644
--- a/lib/validators/__init__.py
+++ b/lib/validators/__init__.py
@@ -13,4 +13,4 @@
 from .utils import ValidationFailure, validator  # noqa
 from .uuid import uuid  # noqa
 
-__version__ = '0.10'
+__version__ = '0.10.3'
diff --git a/lib/validators/domain.py b/lib/validators/domain.py
index 2eca99e21c..8c70eeef73 100644
--- a/lib/validators/domain.py
+++ b/lib/validators/domain.py
@@ -5,7 +5,7 @@
 pattern = re.compile(
     r'^(([a-zA-Z]{1})|([a-zA-Z]{1}[a-zA-Z]{1})|'
     r'([a-zA-Z]{1}[0-9]{1})|([0-9]{1}[a-zA-Z]{1})|'
-    r'([a-zA-Z0-9][-_.a-zA-Z0-9]{1,61}[a-zA-Z0-9]))\.'
+    r'([a-zA-Z0-9][-_.a-zA-Z0-9]{0,61}[a-zA-Z0-9]))\.'
     r'([a-zA-Z]{2,13}|[a-zA-Z0-9-]{2,30}.[a-zA-Z]{2,3})$'
 )
 
diff --git a/lib/validators/iban.py b/lib/validators/iban.py
index 0d72db2c73..7413d1278e 100644
--- a/lib/validators/iban.py
+++ b/lib/validators/iban.py
@@ -3,7 +3,7 @@
 from .utils import validator
 
 regex = (
-    r'^[A-Z]{2}[0-9]{2}[A-Z0-9]{13,30}$'
+    r'^[A-Z]{2}[0-9]{2}[A-Z0-9]{11,30}$'
 )
 pattern = re.compile(regex)
 
diff --git a/lib/validators/url.py b/lib/validators/url.py
index 61a14575c2..e3108eae3c 100644
--- a/lib/validators/url.py
+++ b/lib/validators/url.py
@@ -2,44 +2,97 @@
 
 from .utils import validator
 
-regex = (
-    r'^[a-z]+://([^/:]+{tld}|([0-9]{{1,3}}\.)'
-    r'{{3}}[0-9]{{1,3}})(:[0-9]+)?(\/.*)?$'
+ip_middle_octet = u"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5]))"
+ip_last_octet = u"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
+
+regex = re.compile(
+    u"^"
+    # protocol identifier
+    u"(?:(?:https?|ftp)://)"
+    # user:pass authentication
+    u"(?:\S+(?::\S*)?@)?"
+    u"(?:"
+    u"(?P<private_ip>"
+    # IP address exclusion
+    # private & local networks
+    u"(?:(?:10|127)" + ip_middle_octet + u"{2}" + ip_last_octet + u")|"
+    u"(?:(?:169\.254|192\.168)" + ip_middle_octet + ip_last_octet + u")|"
+    u"(?:172\.(?:1[6-9]|2\d|3[0-1])" + ip_middle_octet + ip_last_octet + u"))"
+    u"|"
+    # IP address dotted notation octets
+    # excludes loopback network 0.0.0.0
+    # excludes reserved space >= 224.0.0.0
+    # excludes network & broadcast addresses
+    # (first & last IP address of each class)
+    u"(?P<public_ip>"
+    u"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
+    u"" + ip_middle_octet + u"{2}"
+    u"" + ip_last_octet + u")"
+    u"|"
+    # host name
+    u"(?:(?:[a-z\u00a1-\uffff0-9]-?)*[a-z\u00a1-\uffff0-9]+)"
+    # domain name
+    u"(?:\.(?:[a-z\u00a1-\uffff0-9]-?)*[a-z\u00a1-\uffff0-9]+)*"
+    # TLD identifier
+    u"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
+    u")"
+    # port number
+    u"(?::\d{2,5})?"
+    # resource path
+    u"(?:/\S*)?"
+    u"$", re.UNICODE
 )
 
-pattern_with_tld = re.compile(regex.format(tld=r'\.[a-z]{2,10}'))
-pattern_without_tld = re.compile(regex.format(tld=''))
+pattern = re.compile(regex)
 
 
 @validator
-def url(value, require_tld=True):
+def url(value, public=False):
     """
     Return whether or not given value is a valid URL.
 
     If the value is valid URL this function returns ``True``, otherwise
     :class:`~validators.utils.ValidationFailure`.
 
-    This validator is based on `WTForms URL validator`_.
+    This validator is based on the wonderful `URL validator of dperini`_.
 
-    .. _WTForms URL validator:
-       https://github.com/wtforms/wtforms/blob/master/wtforms/validators.py
+    .. _URL validator of dperini:
+        https://gist.github.com/dperini/729294
 
     Examples::
 
         >>> url('http://foobar.dk')
         True
 
-        >>> url('http://localhost/foobar', require_tld=False)
+        >>> url('http://10.0.0.1')
         True
 
         >>> url('http://foobar.d')
         ValidationFailure(func=url, ...)
 
+        >>> url('http://10.0.0.1', public=True)
+        ValidationFailure(func=url, ...)
+
     .. versionadded:: 0.2
 
+    .. versionchanged:: 0.10.2
+
+        Added support for various exotic URLs and fixed various false
+        positives.
+
+    .. versionchanged:: 0.10.3
+
+        Added ``public`` parameter.
+
     :param value: URL address string to validate
+    :param public: (default=False) Set True to only allow a public IP address
     """
+    if not public:
+        return pattern.match(value)
+
+    match_result = pattern.match(value)
+
+    if match_result.groupdict()['private_ip']:
+        return False
 
-    if require_tld:
-        return pattern_with_tld.match(value)
-    return pattern_without_tld.match(value)
+    return match_result
diff --git a/lib/xmltodict.py b/lib/xmltodict.py
index 746a4bcd7c..ccbf15ac11 100644
--- a/lib/xmltodict.py
+++ b/lib/xmltodict.py
@@ -1,7 +1,10 @@
 #!/usr/bin/env python
 "Makes working with XML feel like you are working with JSON"
 
-from xml.parsers import expat
+try:
+    from defusedexpat import pyexpat as expat
+except ImportError:
+    from xml.parsers import expat
 from xml.sax.saxutils import XMLGenerator
 from xml.sax.xmlreader import AttributesImpl
 try:  # pragma no cover
@@ -29,7 +32,7 @@
     _unicode = str
 
 __author__ = 'Martin Blech'
-__version__ = '0.9.2'
+__version__ = '0.10.2'
 __license__ = 'MIT'
 
 
@@ -50,10 +53,11 @@ def __init__(self,
                  dict_constructor=OrderedDict,
                  strip_whitespace=True,
                  namespace_separator=':',
-                 namespaces=None):
+                 namespaces=None,
+                 force_list=None):
         self.path = []
         self.stack = []
-        self.data = None
+        self.data = []
         self.item = None
         self.item_depth = item_depth
         self.xml_attribs = xml_attribs
@@ -67,6 +71,7 @@ def __init__(self,
         self.strip_whitespace = strip_whitespace
         self.namespace_separator = namespace_separator
         self.namespaces = namespaces
+        self.force_list = force_list
 
     def _build_name(self, full_name):
         if not self.namespaces:
@@ -93,27 +98,38 @@ def startElement(self, full_name, attrs):
         if len(self.path) > self.item_depth:
             self.stack.append((self.item, self.data))
             if self.xml_attribs:
-                attrs = self.dict_constructor(
-                    (self.attr_prefix+self._build_name(key), value)
-                    for (key, value) in attrs.items())
+                attr_entries = []
+                for key, value in attrs.items():
+                    key = self.attr_prefix+self._build_name(key)
+                    if self.postprocessor:
+                        entry = self.postprocessor(self.path, key, value)
+                    else:
+                        entry = (key, value)
+                    if entry:
+                        attr_entries.append(entry)
+                attrs = self.dict_constructor(attr_entries)
             else:
                 attrs = None
             self.item = attrs or None
-            self.data = None
+            self.data = []
 
     def endElement(self, full_name):
         name = self._build_name(full_name)
         if len(self.path) == self.item_depth:
             item = self.item
             if item is None:
-                item = self.data
+                item = (None if not self.data
+                        else self.cdata_separator.join(self.data))
+
             should_continue = self.item_callback(self.path, item)
             if not should_continue:
                 raise ParsingInterrupted()
         if len(self.stack):
-            item, data = self.item, self.data
+            data = (None if not self.data
+                    else self.cdata_separator.join(self.data))
+            item = self.item
             self.item, self.data = self.stack.pop()
-            if self.strip_whitespace and data is not None:
+            if self.strip_whitespace and data:
                 data = data.strip() or None
             if data and self.force_cdata and item is None:
                 item = self.dict_constructor()
@@ -124,14 +140,15 @@ def endElement(self, full_name):
             else:
                 self.item = self.push_data(self.item, name, data)
         else:
-            self.item = self.data = None
+            self.item = None
+            self.data = []
         self.path.pop()
 
     def characters(self, data):
         if not self.data:
-            self.data = data
+            self.data = [data]
         else:
-            self.data += self.cdata_separator + data
+            self.data.append(data)
 
     def push_data(self, item, key, data):
         if self.postprocessor is not None:
@@ -148,9 +165,20 @@ def push_data(self, item, key, data):
             else:
                 item[key] = [value, data]
         except KeyError:
-            item[key] = data
+            if self._should_force_list(key, data):
+                item[key] = [data]
+            else:
+                item[key] = data
         return item
 
+    def _should_force_list(self, key, value):
+        if not self.force_list:
+            return False
+        try:
+            return key in self.force_list
+        except TypeError:
+            return self.force_list(self.path[:-1], key, value)
+
 
 def parse(xml_input, encoding=None, expat=expat, process_namespaces=False,
           namespace_separator=':', **kwargs):
@@ -220,6 +248,41 @@ def parse(xml_input, encoding=None, expat=expat, process_namespaces=False,
         >>> xmltodict.parse('<a>hello</a>', expat=defusedexpat.pyexpat)
         OrderedDict([(u'a', u'hello')])
 
+    You can use the force_list argument to force lists to be created even
+    when there is only a single child of a given level of hierarchy. The
+    force_list argument is a tuple of keys. If the key for a given level
+    of hierarchy is in the force_list argument, that level of hierarchy
+    will have a list as a child (even if there is only one sub-element).
+    The index_keys operation takes precedence over this. This is applied
+    after any user-supplied postprocessor has already run.
+
+        For example, given this input:
+        <servers>
+          <server>
+            <name>host1</name>
+            <os>Linux</os>
+            <interfaces>
+              <interface>
+                <name>em0</name>
+                <ip_address>10.0.0.1</ip_address>
+              </interface>
+            </interfaces>
+          </server>
+        </servers>
+
+        If called with force_list=('interface',), it will produce
+        this dictionary:
+        {'servers':
+          {'server':
+            {'name': 'host1',
+             'os': 'Linux'},
+             'interfaces':
+              {'interface':
+                [ {'name': 'em0', 'ip_address': '10.0.0.1' } ] } } }
+
+        `force_list` can also be a callable that receives `path`, `key` and
+        `value`. This is helpful in cases where the logic that decides whether
+        a list should be forced is more complex.
     """
     handler = _DictSAXHandler(namespace_separator=namespace_separator,
                               **kwargs)
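A brief sketch of the callable form of ``force_list`` mentioned above (element names are illustrative only)::

    import xmltodict

    def force_interfaces(path, key, value):
        # Force a list only for <interface> elements.
        return key == 'interface'

    doc = xmltodict.parse(
        '<interfaces><interface><name>em0</name></interface></interfaces>',
        force_list=force_interfaces)
    assert isinstance(doc['interfaces']['interface'], list)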
@@ -284,6 +347,8 @@ def _emit(key, value, content_handler,
                 cdata = iv
                 continue
             if ik.startswith(attr_prefix):
+                if not isinstance(iv, _unicode):
+                    iv = _unicode(iv)
                 attrs[ik[len(attr_prefix):]] = iv
                 continue
             children.append((ik, iv))
@@ -346,16 +411,23 @@ def unparse(input_dict, output=None, encoding='utf-8', full_document=True,
 if __name__ == '__main__':  # pragma: no cover
     import sys
     import marshal
+    try:
+        stdin = sys.stdin.buffer
+        stdout = sys.stdout.buffer
+    except AttributeError:
+        stdin = sys.stdin
+        stdout = sys.stdout
 
     (item_depth,) = sys.argv[1:]
     item_depth = int(item_depth)
 
+
     def handle_item(path, item):
-        marshal.dump((path, item), sys.stdout)
+        marshal.dump((path, item), stdout)
         return True
 
     try:
-        root = parse(sys.stdin,
+        root = parse(stdin,
                      item_depth=item_depth,
                      item_callback=handle_item,
                      dict_constructor=dict)
diff --git a/sickbeard/providers/bitcannon.py b/sickbeard/providers/bitcannon.py
index de351f34cb..336d3b8aea 100644
--- a/sickbeard/providers/bitcannon.py
+++ b/sickbeard/providers/bitcannon.py
@@ -65,7 +65,7 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
         results = []
         url = 'http://localhost:3000/'
         if self.custom_url:
-            if not validators.url(self.custom_url, require_tld=False):
+            if not validators.url(self.custom_url):
                 logger.log('Invalid custom url set, please check your settings', logger.WARNING)
                 return results
             url = self.custom_url
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index 223e4eb170..64397e4c43 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -151,13 +151,13 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man
                             title = item.title.get_text(strip=True)
                             download_url = None
                             if item.link:
-                                if validators.url(item.link.get_text(strip=True), require_tld=False):
+                                if validators.url(item.link.get_text(strip=True)):
                                     download_url = item.link.get_text(strip=True)
-                                elif validators.url(item.link.next.strip(), require_tld=False):
+                                elif validators.url(item.link.next.strip()):
                                     download_url = item.link.next.strip()
 
                             if not download_url and item.enclosure:
-                                if validators.url(item.enclosure.get('url', '').strip(), require_tld=False):
+                                if validators.url(item.enclosure.get('url', '').strip()):
                                     download_url = item.enclosure.get('url', '').strip()
 
                             if not (title and download_url):

From 30110d6bed809d3d84ae17b0fb03103f9f88392f Mon Sep 17 00:00:00 2001
From: labrys 
Date: Tue, 21 Jun 2016 05:14:35 -0400
Subject: [PATCH 102/134] Optimize imports

---
 setup.py                             |  2 +-
 sickbeard/common.py                  |  5 +++
 sickbeard/event_queue.py             |  4 +--
 sickbeard/helpers.py                 | 53 +++++++++++++---------------
 sickbeard/logger.py                  | 22 ++++++------
 sickbeard/name_cache.py              | 20 ++++++-----
 sickbeard/nzbget.py                  |  9 ++---
 sickbeard/providers/animebytes.py    |  2 +-
 sickbeard/server/web/home/handler.py | 29 +++++++++------
 tests/db_tests.py                    |  2 +-
 tests/numdict_tests.py               |  8 +----
 tests/torrent_tests.py               |  4 +--
 12 files changed, 83 insertions(+), 77 deletions(-)

diff --git a/setup.py b/setup.py
index b7b956df35..a5111937b9 100644
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@
     description="Automatic Video Library Manager for TV Shows",
     long_description=long_description,
     packages=find_packages(),
-    install_requires=[],
+    install_requires=['six', 'requests', 'tornado', 'profilehooks', 'mako', 'subliminal', 'github', 'contextlib2', ],
     test_suite="tests",
     tests_require=[
         'coveralls',
diff --git a/sickbeard/common.py b/sickbeard/common.py
index c8a74042ae..8f15ac9b76 100644
--- a/sickbeard/common.py
+++ b/sickbeard/common.py
@@ -30,6 +30,8 @@
 import re
 import uuid
 
+from six import PY3
+from six.moves import reduce
 from fake_useragent import (
     settings as UA_SETTINGS,
     UserAgent,
@@ -43,6 +45,9 @@
 from sickrage.tagger.episode import EpisodeTags
 from sickrage.recompiled import tags
 
+if PY3:
+    long = int
+
 # If some provider has an issue with functionality of SR, other than user
 # agents, it's best to come talk to us rather than block.  It is no different
 # than us going to a provider if we have questions or issues.
diff --git a/sickbeard/event_queue.py b/sickbeard/event_queue.py
index eb45380ade..211774a1b6 100644
--- a/sickbeard/event_queue.py
+++ b/sickbeard/event_queue.py
@@ -1,7 +1,7 @@
 # coding=utf-8
 import threading
 import traceback
-from Queue import Queue, Empty
+from six.moves.queue import Queue, Empty
 from sickbeard import logger
 from sickrage.helper.exceptions import ex
 
@@ -15,7 +15,7 @@ def event_type(self):
         """
         Returns the type of the event
         """
-        
+
         return self._type
 
 
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 4e11421c56..573daecc5a 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -18,56 +18,49 @@
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 # pylint:disable=too-many-lines
 
-import os
-import io
+import ast
+import base64
 import ctypes
+import datetime
+import errno
+import hashlib
+import io
+from itertools import izip, cycle
+import operator
+import os
+import platform
+import random
 import re
+import shutil
 import socket
+from socket import timeout as SocketTimeout
 import ssl
 import stat
 import tempfile
 import time
 import traceback
 import uuid
-import base64
+import xml.etree.ElementTree as ET
 import zipfile
-import datetime
-import errno
-import ast
-import operator
-import platform
-import sickbeard
+
 import adba
-import requests
+from cachecontrol import CacheControl
 import certifi
-import hashlib
-import random
-from contextlib import closing
-from socket import timeout as SocketTimeout
-
+from contextlib2 import suppress, closing
+import requests
 from requests.compat import urlparse
+import shutil_custom
 from six.moves import http_client
 
-from sickbeard import logger, classes
+import sickbeard
+from sickbeard import db, logger, classes
 from sickbeard.common import USER_AGENT
-from sickbeard import db
+
 from sickrage.helper.common import (http_code_description, media_extensions, pretty_file_size,
                                     subtitle_extensions, episode_num, remove_strings)
 from sickrage.helper.encoding import ek
 from sickrage.helper.exceptions import ex
 from sickrage.show.Show import Show
-from cachecontrol import CacheControl
-# from httpcache import CachingHTTPAdapter
-
-from itertools import izip, cycle
-from contextlib2 import suppress
-
-import shutil
-import shutil_custom
-
-import xml.etree.ElementTree as ET
-
-shutil.copyfile = shutil_custom.copyfile_custom
 
 try:
     import urllib
@@ -80,6 +73,8 @@
 except ImportError:
     from urllib2 import splittype
 
+shutil.copyfile = shutil_custom.copyfile_custom
+
 
 def fixGlob(path):
     path = re.sub(r'\[', '[[]', path)
diff --git a/sickbeard/logger.py b/sickbeard/logger.py
index 61c6602825..2578ff7363 100644
--- a/sickbeard/logger.py
+++ b/sickbeard/logger.py
@@ -25,28 +25,28 @@
 from __future__ import unicode_literals
 
 import io
-import os
-import re
-import sys
+import locale
 import logging
-import logging.handlers
 from logging import NullHandler
+from logging.handlers import RotatingFileHandler
+import os
 import pkgutil
 import platform
-import locale
-import sickrage
-import subliminal
-import tornado
+import re
+import sys
 import traceback
 
+import tornado
+import subliminal
+
 from requests.compat import quote
 from github import Github, InputFileContent  # pylint: disable=import-error
 
 import sickbeard
 from sickbeard import classes
 
-from sickrage.helper.encoding import ss
-from sickrage.helper.encoding import ek
+import sickrage
+from sickrage.helper.encoding import ss, ek
 from sickrage.helper.exceptions import ex
 from sickrage.helper.common import dateTimeFormat
 
@@ -276,7 +276,7 @@ def init_logging(self, console_logging=False, file_logging=False, debug_logging=
         # rotating log file handler
         if self.file_logging:
 
-            rfh = logging.handlers.RotatingFileHandler(
+            rfh = RotatingFileHandler(
                 self.log_file, maxBytes=int(sickbeard.LOG_SIZE * 1048576), backupCount=sickbeard.LOG_NR,
                 encoding='utf-8')
             rfh.setFormatter(CensoredFormatter(file_log_pattern, dateTimeFormat))
diff --git a/sickbeard/name_cache.py b/sickbeard/name_cache.py
index ecc69b926a..62d3e7a203 100644
--- a/sickbeard/name_cache.py
+++ b/sickbeard/name_cache.py
@@ -18,9 +18,13 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 import threading
+
 import sickbeard
-from sickbeard import db
-from sickbeard import logger
+from sickbeard import db, logger
+from sickbeard.scene_exceptions import (
+    retrieve_exceptions, get_scene_seasons, get_scene_exceptions,
+)
+from sickbeard.helpers import full_sanitizeSceneName
 
 nameCache = {}
 nameCacheLock = threading.Lock()
@@ -36,7 +40,7 @@ def addNameToCache(name, indexer_id=0):
     cache_db_con = db.DBConnection('cache.db')
 
     # standardize the name we're using to account for small differences in providers
-    name = sickbeard.helpers.full_sanitizeSceneName(name)
+    name = full_sanitizeSceneName(name)
     if name not in nameCache:
         nameCache[name] = int(indexer_id)
         cache_db_con.action("INSERT OR REPLACE INTO scene_names (indexer_id, name) VALUES (?, ?)", [indexer_id, name])
@@ -49,7 +53,7 @@ def retrieveNameFromCache(name):
     :param name: The show name to look up.
     :return: the TVDB id that resulted from the cache lookup or None if the show wasn't found in the cache
     """
-    name = sickbeard.helpers.full_sanitizeSceneName(name)
+    name = full_sanitizeSceneName(name)
     if name in nameCache:
         return int(nameCache[name])
 
@@ -80,7 +84,7 @@ def buildNameCache(show=None):
     :param show: Specify show to build name cache for, if None, just do all shows
     """
     with nameCacheLock:
-        sickbeard.scene_exceptions.retrieve_exceptions()
+        retrieve_exceptions()
 
     if not show:
         # logger.log(u"Building internal name cache for all shows", logger.INFO)
@@ -89,9 +93,9 @@ def buildNameCache(show=None):
     else:
         # logger.log(u"Building internal name cache for " + show.name, logger.DEBUG)
         clearCache(show.indexerid)
-        for curSeason in [-1] + sickbeard.scene_exceptions.get_scene_seasons(show.indexerid):
-            for name in set(sickbeard.scene_exceptions.get_scene_exceptions(show.indexerid, season=curSeason) + [show.name]):
-                name = sickbeard.helpers.full_sanitizeSceneName(name)
+        for curSeason in [-1] + get_scene_seasons(show.indexerid):
+            for name in set(get_scene_exceptions(show.indexerid, season=curSeason) + [show.name]):
+                name = full_sanitizeSceneName(name)
                 if name in nameCache:
                     continue
 
diff --git a/sickbeard/nzbget.py b/sickbeard/nzbget.py
index b4c20a990f..b03da736fe 100644
--- a/sickbeard/nzbget.py
+++ b/sickbeard/nzbget.py
@@ -22,9 +22,9 @@
 
 import datetime
 from base64 import standard_b64encode
-import xmlrpclib
 
 from six.moves.http_client import socket
+from six.moves.xmlrpc_client import ServerProxy, ProtocolError
 
 import sickbeard
 from sickbeard import logger
@@ -55,7 +55,7 @@ def sendNZB(nzb, proper=False):  # pylint: disable=too-many-locals, too-many-sta
         sickbeard.NZBGET_PASSWORD,
         sickbeard.NZBGET_HOST)
 
-    nzbGetRPC = xmlrpclib.ServerProxy(url)
+    nzbGetRPC = ServerProxy(url)
     try:
         if nzbGetRPC.writelog('INFO', 'SickRage connected to drop off {} any moment now.'.format(nzb.name + '.nzb')):
             logger.log('Successful connected to NZBget', logger.DEBUG)
@@ -68,7 +68,7 @@ def sendNZB(nzb, proper=False):  # pylint: disable=too-many-locals, too-many-sta
             logger.WARNING)
         return False
 
-    except xmlrpclib.ProtocolError as e:
+    except ProtocolError as e:
         if e.errmsg == 'Unauthorized':
             logger.log('NZBget username or password is incorrect.', logger.WARNING)
         else:
@@ -107,7 +107,8 @@ def sendNZB(nzb, proper=False):  # pylint: disable=too-many-locals, too-many-sta
     logger.log('URL: ' + url, logger.DEBUG)
 
     try:
-        # Find out if nzbget supports priority (Version 9.0+), old versions beginning with a 0.x will use the old command
+        # Find out if nzbget supports priority (Version 9.0+),
+        # old versions beginning with a 0.x will use the old command
         nzbget_version_str = nzbGetRPC.version()
         nzbget_version = try_int(nzbget_version_str[:nzbget_version_str.find('.')])
         if nzbget_version == 0:
diff --git a/sickbeard/providers/animebytes.py b/sickbeard/providers/animebytes.py
index f3c508ef6d..ce0096da4c 100644
--- a/sickbeard/providers/animebytes.py
+++ b/sickbeard/providers/animebytes.py
@@ -21,7 +21,7 @@
 import re
 import traceback
 
-from urlparse import parse_qs
+from six.moves.urllib_parse import parse_qs
 
 from requests.compat import urljoin
 from requests.utils import dict_from_cookiejar
diff --git a/sickbeard/server/web/home/handler.py b/sickbeard/server/web/home/handler.py
index 1812c19628..72a4783986 100644
--- a/sickbeard/server/web/home/handler.py
+++ b/sickbeard/server/web/home/handler.py
@@ -3,20 +3,21 @@
 from __future__ import unicode_literals
 
 import ast
-import datetime
 from datetime import date
 import json
 import os
 import time
+
 import adba
 from libtrakt import TraktAPI
 from requests.compat import unquote_plus, quote_plus
 from tornado.routes import route
+
 import sickbeard
 from sickbeard import (
     clients, config, db, helpers, logger,
     notifiers, sab, search_queue,
-    subtitles, ui, show_name_helpers
+    subtitles, ui, show_name_helpers,
 )
 from sickbeard.blackandwhitelist import BlackAndWhiteList, short_group_names
 from sickbeard.common import (
@@ -27,6 +28,11 @@
     collectEpisodesFromSearchThread, get_provider_cache_results, getEpisode, update_finished_search_queue_item,
     SEARCH_STATUS_FINISHED, SEARCH_STATUS_SEARCHING, SEARCH_STATUS_QUEUED,
 )
+from sickbeard.scene_exceptions import (
+    get_scene_exceptions,
+    get_all_scene_exceptions,
+    update_scene_exceptions,
+)
 from sickbeard.scene_numbering import (
     get_scene_absolute_numbering, get_scene_absolute_numbering_for_show,
     get_scene_numbering, get_scene_numbering_for_show,
@@ -34,6 +40,8 @@
     set_scene_numbering,
 )
 from sickbeard.versionChecker import CheckVersion
+from sickbeard.server.web.core import WebRoot, PageTemplate
+
 from sickrage.helper.common import (
     try_int, enabled_providers,
 )
@@ -48,7 +56,6 @@
 from sickrage.show.Show import Show
 from sickrage.system.Restart import Restart
 from sickrage.system.Shutdown import Shutdown
-from sickbeard.server.web.core import WebRoot, PageTemplate
 
 
 @route('/home(/?.*)')
@@ -674,10 +681,10 @@ def getSeasonSceneExceptions(self, indexer, indexer_id):
         :return: A json with the scene exceptions per season.
         """
         return json.dumps({
-            'seasonExceptions': sickbeard.scene_exceptions.get_all_scene_exceptions(indexer_id),
+            'seasonExceptions': get_all_scene_exceptions(indexer_id),
             'xemNumbering': {tvdb_season_ep[0]: anidb_season_ep[0]
                              for (tvdb_season_ep, anidb_season_ep)
-                             in get_xem_numbering_for_show(indexer_id, indexer).iteritems()}
+                             in iteritems(get_xem_numbering_for_show(indexer_id, indexer))}
         })
 
     def displayShow(self, show=None):
@@ -834,7 +841,7 @@ def titler(x):
         if show_obj.is_anime:
             bwl = show_obj.release_groups
 
-        show_obj.exceptions = sickbeard.scene_exceptions.get_scene_exceptions(show_obj.indexerid)
+        show_obj.exceptions = get_scene_exceptions(show_obj.indexerid)
 
         indexerid = int(show_obj.indexerid)
         indexer = int(show_obj.indexer)
@@ -1140,7 +1147,7 @@ def titler(x):
         if show_obj.is_anime:
             bwl = show_obj.release_groups
 
-        show_obj.exceptions = sickbeard.scene_exceptions.get_scene_exceptions(show_obj.indexerid)
+        show_obj.exceptions = get_scene_exceptions(show_obj.indexerid)
 
         indexer_id = int(show_obj.indexerid)
         indexer = int(show_obj.indexer)
@@ -1212,7 +1219,7 @@ def plotDetails(show, season, episode):
 
     @staticmethod
     def sceneExceptions(show):
-        exceptions_list = sickbeard.scene_exceptions.get_all_scene_exceptions(show)
+        exceptions_list = get_all_scene_exceptions(show)
         if not exceptions_list:
             return 'No scene exceptions'
 
@@ -1250,7 +1257,7 @@ def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[],
             else:
                 return self._genericMessage('Error', error_string)
 
-        show_obj.exceptions = sickbeard.scene_exceptions.get_scene_exceptions(show_obj.indexerid)
+        show_obj.exceptions = get_scene_exceptions(show_obj.indexerid)
 
         if try_int(quality_preset, None):
             preferred_qualities = []
@@ -1273,7 +1280,7 @@ def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[],
 
             with show_obj.lock:
                 show = show_obj
-                scene_exceptions = sickbeard.scene_exceptions.get_scene_exceptions(show_obj.indexerid)
+                scene_exceptions = get_scene_exceptions(show_obj.indexerid)
 
             if show_obj.is_anime:
                 return t.render(show=show, scene_exceptions=scene_exceptions, groups=groups, whitelist=whitelist,
@@ -1407,7 +1414,7 @@ def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[],
 
         if do_update_exceptions:
             try:
-                sickbeard.scene_exceptions.update_scene_exceptions(show_obj.indexerid, exceptions_list)  # @UndefinedVdexerid)
+                update_scene_exceptions(show_obj.indexerid, exceptions_list)  # @UndefinedVdexerid)
                 time.sleep(cpu_presets[sickbeard.CPU_PRESET])
             except CantUpdateShowException:
                 errors.append('Unable to force an update on scene exceptions of the show.')
diff --git a/tests/db_tests.py b/tests/db_tests.py
index 0c0ce59bd7..f9ebf89812 100644
--- a/tests/db_tests.py
+++ b/tests/db_tests.py
@@ -82,7 +82,7 @@ def test_threaded(self):
         """
         Test multi-threaded selection from the database
         """
-        for _ in xrange(4):
+        for _ in range(4):
             thread = threading.Thread(target=self.select)
             thread.start()
 
diff --git a/tests/numdict_tests.py b/tests/numdict_tests.py
index a5dfa980e4..7156439fc8 100644
--- a/tests/numdict_tests.py
+++ b/tests/numdict_tests.py
@@ -13,15 +13,9 @@
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
 sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
+from six.moves import UserDict
 from sickbeard.numdict import NumDict
 
-PY3 = sys.version_info >= (3, )
-
-if PY3:
-    from collections import UserDict  # pylint: disable=no-name-in-module
-else:
-    from UserDict import UserDict
-
 
 class NumDictTest(unittest.TestCase):
     """
diff --git a/tests/torrent_tests.py b/tests/torrent_tests.py
index db70f76f3e..9d5fbda5ab 100644
--- a/tests/torrent_tests.py
+++ b/tests/torrent_tests.py
@@ -35,7 +35,7 @@
 from sickbeard.providers.bitcannon import BitCannonProvider
 from sickbeard.tv import TVEpisode, TVShow
 import tests.test_lib as test
-import urlparse
+from six.moves.urllib_parse import urljoin
 
 
 class TorrentBasicTests(test.SickbeardTestDBCase):
@@ -99,7 +99,7 @@ def test_search():  # pylint: disable=too-many-locals
 
         for row in torrent_rows[1:]:
             try:
-                link = urlparse.urljoin(url, (row.find('div', {'class': 'torrentname'}).find_all('a')[1])['href'])
+                link = urljoin(url, (row.find('div', {'class': 'torrentname'}).find_all('a')[1])['href'])
                 _id = row.get('id')[-7:]
                 title = (row.find('div', {'class': 'torrentname'}).find_all('a')[1]).text \
                     or (row.find('div', {'class': 'torrentname'}).find_all('a')[2]).text

From 887ba2b1da21f922638cc6ae42b092c34515500f Mon Sep 17 00:00:00 2001
From: labrys 
Date: Tue, 21 Jun 2016 02:36:07 -0400
Subject: [PATCH 103/134] PY3 has no dict.iterkeys(), dict.iteritems() and
 dict.itervalues() methods

---
 sickbeard/clients/utorrent_client.py          |  3 ++-
 sickbeard/config.py                           |  3 ++-
 sickbeard/databases/main_db.py                | 10 +++++-----
 sickbeard/helpers.py                          |  3 ++-
 sickbeard/logger.py                           |  3 ++-
 sickbeard/metadata/generic.py                 |  6 +++---
 sickbeard/metadata/mediabrowser.py            |  8 ++++----
 sickbeard/name_cache.py                       |  9 ++++++---
 sickbeard/network_timezones.py                | 17 ++++++++--------
 sickbeard/notifiers/plex.py                   | 13 +++++++-----
 sickbeard/scene_exceptions.py                 | 10 ++++++----
 sickbeard/searchBacklog.py                    | 13 ++++++------
 sickbeard/server/api/core.py                  | 10 +++++-----
 sickbeard/server/web/core/base.py             |  5 ++++-
 sickbeard/server/web/home/add_shows.py        | 10 +++++++---
 sickbeard/server/web/home/handler.py          |  9 +++++----
 sickbeard/subtitles.py                        | 19 ++++++++++--------
 tests/sickrage_tests/helper/common_tests.py   | 20 ++++++++++---------
 tests/sickrage_tests/helper/quality_tests.py  |  3 ++-
 .../media/generic_media_tests.py              |  3 ++-
 .../providers/generic_provider_tests.py       |  9 +++++----
 .../providers/nzb_provider_tests.py           |  4 ++--
 .../providers/torrent_provider_tests.py       |  3 ++-
 .../show/coming_episodes_tests.py             |  3 ++-
 tests/sickrage_tests/show/history_tests.py    |  3 ++-
 tests/sickrage_tests/show/show_tests.py       |  4 ++--
 tests/sickrage_tests/system/restart_tests.py  |  3 ++-
 tests/sickrage_tests/system/shutdown_tests.py |  3 ++-
 28 files changed, 120 insertions(+), 89 deletions(-)
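For reference, a tiny sketch of the ``six`` idiom this patch standardises on (dictionary contents are illustrative)::

    from six import iteritems

    status_map = {'returning series': 'Continuing', 'ended': 'Ended'}

    # Works unchanged on Python 2 (iteritems) and Python 3 (items view).
    for old_status, new_status in iteritems(status_map):
        print(old_status, new_status)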

diff --git a/sickbeard/clients/utorrent_client.py b/sickbeard/clients/utorrent_client.py
index 7920d25998..8888371b08 100644
--- a/sickbeard/clients/utorrent_client.py
+++ b/sickbeard/clients/utorrent_client.py
@@ -23,6 +23,7 @@
 import logging
 import re
 
+from six import iteritems
 from requests.compat import urljoin
 
 import sickbeard
@@ -53,7 +54,7 @@ def _request(self, method='get', params=None, data=None, files=None, cookies=Non
             'token': self.auth,
         }
 
-        for k, v in params.iteritems() or {}:
+        for k, v in iteritems(params) or {}:
             ordered_params.update({k: v})
 
         return super(UTorrentAPI, self)._request(method=method, params=ordered_params, data=data, files=files)
diff --git a/sickbeard/config.py b/sickbeard/config.py
index 510907078f..9771d6f4c0 100644
--- a/sickbeard/config.py
+++ b/sickbeard/config.py
@@ -23,6 +23,7 @@
 import re
 
 from requests.compat import urlsplit
+from six import iteritems
 from six.moves.urllib.parse import uses_netloc, urlunsplit
 
 import sickbeard
@@ -599,7 +600,7 @@ def check_setting_str(config, cfg_name, item_name, def_val, silent=True, censor_
             config[cfg_name] = {}
             config[cfg_name][item_name] = helpers.encrypt(my_val, encryption_version)
 
-    if privacy_level >= censor_level or (cfg_name, item_name) in logger.censored_items.iteritems():
+    if privacy_level >= censor_level or (cfg_name, item_name) in iteritems(logger.censored_items):
         if not item_name.endswith('custom_url'):
             logger.censored_items[cfg_name, item_name] = my_val
 
diff --git a/sickbeard/databases/main_db.py b/sickbeard/databases/main_db.py
index 89718dc771..6b6937cd1f 100644
--- a/sickbeard/databases/main_db.py
+++ b/sickbeard/databases/main_db.py
@@ -21,17 +21,17 @@
 import datetime
 
 import warnings
-import sickbeard
 import os.path
 
-from sickbeard import db, common, helpers, logger
+from six import iteritems
 
+import sickbeard
+from sickbeard import db, common, helpers, logger, subtitles
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
+
 from sickrage.helper.common import dateTimeFormat, episode_num
 from sickrage.helper.encoding import ek
 
-from sickbeard import subtitles
-
 MIN_DB_VERSION = 9  # oldest db version we support migrating from
 MAX_DB_VERSION = 43
 
@@ -238,7 +238,7 @@ def fix_tvrage_show_statues(self):
             '': 'Unknown',
         }
 
-        for old_status, new_status in status_map.iteritems():
+        for old_status, new_status in iteritems(status_map):
             self.connection.action("UPDATE tv_shows SET status = ? WHERE LOWER(status) = ?", [new_status, old_status])
 
     def fix_episode_statuses(self):
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 573daecc5a..1680b5a3f9 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -50,6 +50,7 @@
 import requests
 from requests.compat import urlparse
 import shutil_custom
+from six import iteritems
 from six.moves import http_client
 
 import sickbeard
@@ -173,7 +174,7 @@ def remove_non_release_groups(name, clean_proper=False):
     }
 
     _name = name
-    for remove_string, remove_type in removeWordsList.iteritems():
+    for remove_string, remove_type in iteritems(removeWordsList):
         if remove_type == 'search':
             _name = _name.replace(remove_string, '')
         elif remove_type == 'searchre':
diff --git a/sickbeard/logger.py b/sickbeard/logger.py
index 2578ff7363..d6e25e6f40 100644
--- a/sickbeard/logger.py
+++ b/sickbeard/logger.py
@@ -40,6 +40,7 @@
 import subliminal
 
 from requests.compat import quote
+from six import itervalues
 from github import Github, InputFileContent  # pylint: disable=import-error
 
 import sickbeard
@@ -170,7 +171,7 @@ def format(self, record):
                 msg = re.sub(ssl_error, SSL_ERROR_HELP_MSG, msg)
 
         # set of censored items
-        censored = {item for _, item in censored_items.iteritems() if item}
+        censored = {value for value in itervalues(censored_items) if value}
         # set of censored items and urlencoded counterparts
         censored = censored | {quote(item) for item in censored}
         # convert set items to unicode and typecast to list
diff --git a/sickbeard/metadata/generic.py b/sickbeard/metadata/generic.py
index 45e3d03455..4e2a74bef4 100644
--- a/sickbeard/metadata/generic.py
+++ b/sickbeard/metadata/generic.py
@@ -22,6 +22,7 @@
 import io
 import re
 
+from six import iterkeys
 from tmdb_api.tmdb_api import TMDB
 
 import sickbeard
@@ -34,7 +35,6 @@
 from sickrage.helper.exceptions import ex
 from sickrage.helper.encoding import ek
 
-
 try:
     import xml.etree.cElementTree as etree
 except ImportError:
@@ -320,7 +320,7 @@ def create_episode_thumb(self, ep_obj):
     def create_season_posters(self, show_obj):
         if self.season_posters and show_obj:
             result = []
-            for season, _ in show_obj.episodes.iteritems():  # @UnusedVariable
+            for season in iterkeys(show_obj.episodes):
                 if not self._has_season_poster(show_obj, season):
                     logger.log(u"Metadata provider " + self.name + " creating season posters for " + show_obj.name,
                                logger.DEBUG)
@@ -332,7 +332,7 @@ def create_season_banners(self, show_obj):
         if self.season_banners and show_obj:
             result = []
             logger.log(u"Metadata provider " + self.name + " creating season banners for " + show_obj.name, logger.DEBUG)
-            for season, _ in show_obj.episodes.iteritems():  # @UnusedVariable
+            for season in iterkeys(show_obj.episodes):
                 if not self._has_season_banner(show_obj, season):
                     result = result + [self.save_season_banners(show_obj, season)]
             return all(result)
diff --git a/sickbeard/metadata/mediabrowser.py b/sickbeard/metadata/mediabrowser.py
index e8a14d7a6a..cb292bf666 100644
--- a/sickbeard/metadata/mediabrowser.py
+++ b/sickbeard/metadata/mediabrowser.py
@@ -21,11 +21,11 @@
 import os
 import re
 
-import sickbeard
-
-from sickbeard.metadata import generic
+from six import iteritems
 
+import sickbeard
 from sickbeard import logger, helpers
+from sickbeard.metadata import generic
 
 from sickrage.helper.common import dateFormat, replace_extension, episode_num
 from sickrage.helper.encoding import ek
@@ -568,7 +568,7 @@ def _ep_data(self, ep_obj):
                 persons_dict['Writer'] += [x.strip() for x in my_ep['writer'].split('|') if x.strip()]
 
         # fill in Persons section with collected directors, guest starts and writers
-        for person_type, names in persons_dict.iteritems():
+        for person_type, names in iteritems(persons_dict):
             # remove doubles
             names = list(set(names))
             for cur_name in names:
diff --git a/sickbeard/name_cache.py b/sickbeard/name_cache.py
index 62d3e7a203..d9aa6ddbef 100644
--- a/sickbeard/name_cache.py
+++ b/sickbeard/name_cache.py
@@ -17,8 +17,11 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+
 import threading
 
+from six import iteritems
+
 import sickbeard
 from sickbeard import db, logger
 from sickbeard.scene_exceptions import (
@@ -65,7 +68,7 @@ def clearCache(indexerid=0):
     cache_db_con = db.DBConnection('cache.db')
     cache_db_con.action("DELETE FROM scene_names WHERE indexer_id = ? OR indexer_id = ?", (indexerid, 0))
 
-    toRemove = [key for key, value in nameCache.iteritems() if value == 0 or value == indexerid]
+    toRemove = [key for key, value in iteritems(nameCache) if value == 0 or value == indexerid]
     for key in toRemove:
         del nameCache[key]
 
@@ -74,7 +77,7 @@ def saveNameCacheToDb():
     """Commit cache to database file"""
     cache_db_con = db.DBConnection('cache.db')
 
-    for name, indexer_id in nameCache.iteritems():
+    for name, indexer_id in iteritems(nameCache):
         cache_db_con.action("INSERT OR REPLACE INTO scene_names (indexer_id, name) VALUES (?, ?)", [indexer_id, name])
 
 
@@ -100,4 +103,4 @@ def buildNameCache(show=None):
                     continue
 
                 nameCache[name] = int(show.indexerid)
-        logger.log(u"Internal name cache for " + show.name + " set to: [ " + u', '.join([key for key, value in nameCache.iteritems() if value == show.indexerid]) + " ]", logger.DEBUG)
+        logger.log(u"Internal name cache for " + show.name + " set to: [ " + u', '.join([key for key, value in iteritems(nameCache) if value == show.indexerid]) + " ]", logger.DEBUG)
diff --git a/sickbeard/network_timezones.py b/sickbeard/network_timezones.py
index 2de274a0f2..64ebf8cc95 100644
--- a/sickbeard/network_timezones.py
+++ b/sickbeard/network_timezones.py
@@ -20,23 +20,22 @@
 
 import re
 import datetime
+
 from dateutil import tz
+from six import iteritems
 
-from sickbeard import db
-from sickbeard import helpers
-from sickbeard import logger
+from sickbeard import db, helpers, logger
 from sickrage.helper.common import try_int
 
-# regex to parse time (12/24 hour format)
-time_regex = re.compile(r'(?P<hour>\d{1,2})(?:[:.](?P<minute>\d{2})?)? ?(?P<meridiem>[PA]\.? ?M?)?\b', re.I)
-
-network_dict = None
-
 try:
     sb_timezone = tz.tzwinlocal() if tz.tzwinlocal else tz.tzlocal()
 except Exception:
     sb_timezone = tz.tzlocal()
 
+# regex to parse time (12/24 hour format)
+time_regex = re.compile(r'(?P<hour>\d{1,2})(?:[:.](?P<minute>\d{2})?)? ?(?P<meridiem>[PA]\.? ?M?)?\b', re.I)
+
+network_dict = None
 missing_network_timezones = set()
 
 
@@ -66,7 +65,7 @@ def update_network_dict():
     network_list = dict(cache_db_con.select('SELECT * FROM network_timezones;'))
 
     queries = []
-    for network, timezone in d.iteritems():
+    for network, timezone in iteritems(d):
         existing = network in network_list
         if not existing:
             queries.append(['INSERT OR IGNORE INTO network_timezones VALUES (?,?);', [network, timezone]])
diff --git a/sickbeard/notifiers/plex.py b/sickbeard/notifiers/plex.py
index d490f45585..d9c67f1e48 100644
--- a/sickbeard/notifiers/plex.py
+++ b/sickbeard/notifiers/plex.py
@@ -19,10 +19,8 @@
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 
 import re
-try:
-    import xml.etree.cElementTree as etree
-except ImportError:
-    import xml.etree.ElementTree as etree
+
+from six import iteritems
 
 import sickbeard
 from sickbeard import logger, common
@@ -30,6 +28,11 @@
 
 from sickrage.helper.exceptions import ex
 
+try:
+    import xml.etree.cElementTree as etree
+except ImportError:
+    import xml.etree.ElementTree as etree
+
 
 class Notifier(object):
     def __init__(self):
@@ -196,7 +199,7 @@ def update_library(self, ep_obj=None, host=None,  # pylint: disable=too-many-arg
             logger.log(u'PLEX: Updating all hosts with TV sections: ' + ', '.join(set(hosts_all)), logger.DEBUG)
 
         hosts_try = (hosts_match.copy(), hosts_all.copy())[not len(hosts_match)]
-        for section_key, cur_host in hosts_try.iteritems():
+        for section_key, cur_host in iteritems(hosts_try):
 
             url = 'http{}://{}/library/sections/{}/refresh'.format(('', 's')[sickbeard.PLEX_SERVER_HTTPS], cur_host, section_key)
             try:
diff --git a/sickbeard/scene_exceptions.py b/sickbeard/scene_exceptions.py
index 983f691b1f..237745216d 100644
--- a/sickbeard/scene_exceptions.py
+++ b/sickbeard/scene_exceptions.py
@@ -18,12 +18,14 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 
-import adba
 import time
 import datetime
-import requests
 import threading
 import traceback
+
+import adba
+from six import iteritems
+
 import sickbeard
 from sickbeard import db, helpers, logger
 from sickbeard.indexers.indexer_config import INDEXER_TVDB
@@ -245,7 +247,7 @@ def retrieve_exceptions():  # pylint:disable=too-many-locals, too-many-branches
             continue
 
         for cur_exception_dict in exception_dict[cur_indexer_id]:
-            for ex in cur_exception_dict.iteritems():
+            for ex in iteritems(cur_exception_dict):
                 cur_exception, curSeason = ex
                 if cur_exception not in existing_exceptions:
                     queries.append(
@@ -315,7 +317,7 @@ def _xem_exceptions_fetcher():
             if parsedJSON['result'] == 'failure':
                 continue
 
-            for indexerid, names in parsedJSON['data'].iteritems():
+            for indexerid, names in iteritems(parsedJSON['data']):
                 try:
                     xem_exception_dict[int(indexerid)] = names
                 except Exception as error:
diff --git a/sickbeard/searchBacklog.py b/sickbeard/searchBacklog.py
index ab4168f3f7..8c8c68e972 100644
--- a/sickbeard/searchBacklog.py
+++ b/sickbeard/searchBacklog.py
@@ -21,13 +21,12 @@
 import datetime
 import threading
 
-import sickbeard
+from six import iteritems
 
-from sickbeard import db, scheduler
-from sickbeard import search_queue
-from sickbeard import logger
-from sickbeard import ui
-from sickbeard import common
+import sickbeard
+from sickbeard import (
+    db, scheduler, search_queue, logger, ui, common,
+)
 
 
 class BacklogSearchScheduler(scheduler.Scheduler):
@@ -103,7 +102,7 @@ def searchBacklog(self, which_shows=None):
 
             segments = self._get_segments(curShow, fromDate)
 
-            for season, segment in segments.iteritems():
+            for season, segment in iteritems(segments):
                 self.currentSearchInfo = {'title': curShow.name + " Season " + str(season)}
 
                 backlog_queue_item = search_queue.BacklogQueueItem(curShow, segment)
diff --git a/sickbeard/server/api/core.py b/sickbeard/server/api/core.py
index 2405711665..d9a6432d2a 100644
--- a/sickbeard/server/api/core.py
+++ b/sickbeard/server/api/core.py
@@ -33,8 +33,8 @@
 import traceback
 
 from requests.compat import unquote_plus
-
-from tornado.web import RequestHandler  # pylint: disable=import-error
+from six import iteritems
+from tornado.web import RequestHandler
 
 import sickbeard
 from sickbeard import (
@@ -102,7 +102,7 @@ def __init__(self, *args, **kwargs):
 
     def get(self, *args, **kwargs):
         kwargs = self.request.arguments
-        for arg, value in kwargs.iteritems():
+        for arg, value in iteritems(kwargs):
             if len(value) == 1:
                 kwargs[arg] = value[0]
 
@@ -658,7 +658,7 @@ def run(self):
         grouped_coming_episodes = ComingEpisodes.get_coming_episodes(self.type, self.sort, True, self.paused)
         data = {section: [] for section in grouped_coming_episodes.keys()}
 
-        for section, coming_episodes in grouped_coming_episodes.iteritems():
+        for section, coming_episodes in iteritems(grouped_coming_episodes):
             for coming_episode in coming_episodes:
                 data[section].append({
                     'airdate': coming_episode['airdate'],
@@ -902,7 +902,7 @@ def _ep_result(result_code, ep, msg=""):
 
         extra_msg = ""
         if start_backlog:
-            for season, segment in segments.iteritems():
+            for season, segment in iteritems(segments):
                 cur_backlog_queue_item = search_queue.BacklogQueueItem(show_obj, segment)
                 sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)  # @UndefinedVariable
 
diff --git a/sickbeard/server/web/core/base.py b/sickbeard/server/web/core/base.py
index 20c4d7a477..fffa32c349 100644
--- a/sickbeard/server/web/core/base.py
+++ b/sickbeard/server/web/core/base.py
@@ -14,6 +14,7 @@
 from mako.runtime import UNDEFINED
 from mako.template import Template as MakoTemplate
 from requests.compat import urljoin
+from six import iteritems
 from tornado.concurrent import run_on_executor
 from tornado.escape import utf8
 from tornado.gen import coroutine
@@ -21,11 +22,13 @@
 from tornado.process import cpu_count
 from tornado.routes import route
 from tornado.web import RequestHandler, HTTPError, authenticated
+
 import sickbeard
 from sickbeard import (
     classes, db, helpers, logger, network_timezones, ui
 )
 from sickbeard.server.api.core import function_mapper
+
 from sickrage.helper.encoding import ek
 from sickrage.media.ShowBanner import ShowBanner
 from sickrage.media.ShowFanArt import ShowFanArt
@@ -258,7 +261,7 @@ def get(self, route, *args, **kwargs):
     def async_call(self, function):
         try:
             kwargs = self.request.arguments
-            for arg, value in kwargs.iteritems():
+            for arg, value in iteritems(kwargs):
                 if len(value) == 1:
                     kwargs[arg] = value[0]
 
diff --git a/sickbeard/server/web/home/add_shows.py b/sickbeard/server/web/home/add_shows.py
index d651b7a823..8a6e4a40b0 100644
--- a/sickbeard/server/web/home/add_shows.py
+++ b/sickbeard/server/web/home/add_shows.py
@@ -6,10 +6,13 @@
 import json
 import os
 import re
+
 from libtrakt import TraktAPI
 from libtrakt.exceptions import traktException
 from requests.compat import unquote_plus
+from six import iteritems
 from tornado.routes import route
+
 import sickbeard
 from sickbeard import (
     classes, config, db, helpers, logger, ui,
@@ -19,6 +22,9 @@
 from sickbeard.helpers import get_showname_from_indexer
 from sickbeard.imdbPopular import imdb_popular
 from sickbeard.indexers.indexer_exceptions import indexer_exception
+from sickbeard.server.web.core import PageTemplate
+from sickbeard.server.web.home.handler import Home
+
 from sickrage.helper.common import (
     sanitize_filename, try_int,
 )
@@ -28,8 +34,6 @@
     MultipleShowObjectsException,
 )
 from sickrage.show.Show import Show
-from sickbeard.server.web.core import PageTemplate
-from sickbeard.server.web.home.handler import Home
 
 
 @route('/addShows(/?.*)')
@@ -91,7 +95,7 @@ def searchIndexersForShowName(search_term, lang=None, indexer=None):
                 except indexer_exception as msg:
                     logger.log(u'Error searching for show: {error}'.format(error=msg))
 
-        for i, shows in results.iteritems():
+        for i, shows in iteritems(results):
             final_results.extend({(sickbeard.indexerApi(i).name, i, sickbeard.indexerApi(i).config['show_url'], int(show['id']),
                                    show['seriesname'], show['firstaired']) for show in shows})
 
diff --git a/sickbeard/server/web/home/handler.py b/sickbeard/server/web/home/handler.py
index 72a4783986..adbebcdd90 100644
--- a/sickbeard/server/web/home/handler.py
+++ b/sickbeard/server/web/home/handler.py
@@ -11,6 +11,7 @@
 import adba
 from libtrakt import TraktAPI
 from requests.compat import unquote_plus, quote_plus
+from six import iteritems
 from tornado.routes import route
 
 import sickbeard
@@ -999,7 +1000,7 @@ def manualSearchCheckCache(self, show, season, episode, manual_search_type, **la
 
         sql_episode = '' if manual_search_type == 'season' else episode
 
-        for provider, last_update in last_prov_updates.iteritems():
+        for provider, last_update in iteritems(last_prov_updates):
             table_exists = main_db_con.select(
                 b'SELECT name '
                 b'FROM sqlite_master '
@@ -1224,7 +1225,7 @@ def sceneExceptions(show):
             return 'No scene exceptions'
 
         out = []
-        for season, names in iter(sorted(exceptions_list.iteritems())):
+        for season, names in iter(sorted(iteritems(exceptions_list))):
             if season == -1:
                 season = '*'
             out.append('S{season}: {names}'.format(season=season, names=', '.join(names)))
@@ -1726,7 +1727,7 @@ def setStatus(self, show=None, eps=None, status=None, direct=False):
             msg = 'Backlog was automatically started for the following seasons of {show}:
'.format(show=show_obj.name) msg += '
    ' - for season, segment in segments.iteritems(): + for season, segment in iteritems(segments): cur_backlog_queue_item = search_queue.BacklogQueueItem(show_obj, segment) sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) @@ -1748,7 +1749,7 @@ def setStatus(self, show=None, eps=None, status=None, direct=False): msg = 'Retrying Search was automatically started for the following season of {show}:
    '.format(show=show_obj.name) msg += '
      ' - for season, segment in segments.iteritems(): + for season, segment in iteritems(segments): cur_failed_queue_item = search_queue.FailedQueueItem(show_obj, segment) sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item) diff --git a/sickbeard/subtitles.py b/sickbeard/subtitles.py index 6493bc2995..2a137ff96c 100644 --- a/sickbeard/subtitles.py +++ b/sickbeard/subtitles.py @@ -18,28 +18,31 @@ # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . + import datetime import logging import operator import os import re -import sickbeard import subprocess -import traceback import time +import traceback from babelfish import Language, language_converters +from six import iteritems from dogpile.cache.api import NO_VALUE -from sickbeard.common import cpu_presets -from sickrage.helper.common import dateTimeFormat, episode_num, subtitle_extensions -from sickrage.helper.exceptions import ex -from sickrage.show.Show import Show from subliminal import (compute_score, ProviderPool, provider_manager, refine, refiner_manager, region, save_subtitles, scan_video) from subliminal.core import search_external_subtitles from subliminal.score import episode_scores from subliminal.subtitle import get_subtitle_path +import sickbeard +from sickbeard.common import cpu_presets +from sickrage.helper.common import dateTimeFormat, episode_num, subtitle_extensions +from sickrage.helper.exceptions import ex +from sickrage.show.Show import Show + from . import db, history, processTV from .helpers import isMediaFile, isRarFile, remove_non_release_groups @@ -556,7 +559,7 @@ def get_video(video_path, subtitles_dir=None, subtitles=True, embedded_subtitles payload = {'subtitles_dir': subtitles_dir, 'subtitles': subtitles, 'embedded_subtitles': embedded_subtitles, 'release_name': release_name} cached_payload = region.get(key, expiration_time=VIDEO_EXPIRATION_TIME) - if cached_payload != NO_VALUE and {k: v for k, v in cached_payload.iteritems() if k != 'video'} == payload: + if cached_payload != NO_VALUE and {k: v for k, v in iteritems(cached_payload) if k != 'video'} == payload: logger.debug(u'Found cached video information under key %s', key) return cached_payload['video'] @@ -723,7 +726,7 @@ def subtitles_download_in_pp(): # pylint: disable=too-many-locals, too-many-bra if not isMediaFile(filename): continue - + filename = clear_non_release_groups(root, filename) video_path = os.path.join(root, filename) diff --git a/tests/sickrage_tests/helper/common_tests.py b/tests/sickrage_tests/helper/common_tests.py index a2b5b668e0..45436f2ffd 100644 --- a/tests/sickrage_tests/helper/common_tests.py +++ b/tests/sickrage_tests/helper/common_tests.py @@ -32,6 +32,8 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) +from six import iteritems + import sickbeard from sickrage.helper.common import http_code_description, is_sync_file, is_torrent_or_nzb_file, pretty_file_size from sickrage.helper.common import remove_extension, replace_extension, sanitize_filename, try_int, convert_size, episode_num @@ -73,7 +75,7 @@ def test_http_code_description(self): } for test in test_cases, unicode_test_cases: - for (http_code, result) in test.iteritems(): + for (http_code, result) in iteritems(test): self.assertEqual(http_code_description(http_code), result) def test_is_sync_file(self): @@ -119,7 +121,7 @@ def test_is_sync_file(self): } for tests in 
test_cases, unicode_test_cases: - for (filename, result) in tests.iteritems(): + for (filename, result) in iteritems(tests): self.assertEqual(is_sync_file(filename), result) def test_is_torrent_or_nzb_file(self): @@ -151,7 +153,7 @@ def test_is_torrent_or_nzb_file(self): } for tests in test_cases, unicode_test_cases: - for (filename, result) in tests.iteritems(): + for (filename, result) in iteritems(tests): self.assertEqual(is_torrent_or_nzb_file(filename), result) def test_pretty_file_size(self): @@ -188,7 +190,7 @@ def test_pretty_file_size(self): } for tests in test_cases, unicode_test_cases: - for (size, result) in tests.iteritems(): + for (size, result) in iteritems(tests): self.assertEqual(pretty_file_size(size), result) def test_remove_extension(self): @@ -229,7 +231,7 @@ def test_remove_extension(self): u'file.name.avi': u'file.name', } for tests in test_cases, unicode_test_cases: - for (extension, result) in tests.iteritems(): + for (extension, result) in iteritems(tests): self.assertEqual(remove_extension(extension), result) def test_replace_extension(self): @@ -305,7 +307,7 @@ def test_replace_extension(self): } for tests in test_cases, unicode_test_cases: - for ((filename, extension), result) in tests.iteritems(): + for ((filename, extension), result) in iteritems(tests): self.assertEqual(replace_extension(filename, extension), result) def test_sanitize_filename(self): @@ -333,7 +335,7 @@ def test_sanitize_filename(self): } for tests in test_cases, unicode_test_cases: - for (filename, result) in tests.iteritems(): + for (filename, result) in iteritems(tests): self.assertEqual(sanitize_filename(filename), result) def test_try_int(self): @@ -363,7 +365,7 @@ def test_try_int(self): } for test in test_cases, unicode_test_cases: - for (candidate, result) in test.iteritems(): + for (candidate, result) in iteritems(test): self.assertEqual(try_int(candidate), result) def test_try_int_with_default(self): @@ -394,7 +396,7 @@ def test_try_int_with_default(self): } for test in test_cases, unicode_test_cases: - for (candidate, result) in test.iteritems(): + for (candidate, result) in iteritems(test): self.assertEqual(try_int(candidate, default_value), result) def test_convert_size(self): diff --git a/tests/sickrage_tests/helper/quality_tests.py b/tests/sickrage_tests/helper/quality_tests.py index a6a34236af..1b26c7dcc7 100644 --- a/tests/sickrage_tests/helper/quality_tests.py +++ b/tests/sickrage_tests/helper/quality_tests.py @@ -30,6 +30,7 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) +from six import iteritems from sickbeard.common import ANY, HD, HD1080p, HD720p, Quality, SD from sickrage.helper.quality import get_quality_string @@ -62,7 +63,7 @@ def test_get_quality_string(self): 1000000: 'Custom', # An invalid quality number to test the default case } - for (quality, result) in tests.iteritems(): + for (quality, result) in iteritems(tests): self.assertEqual(get_quality_string(quality), result) diff --git a/tests/sickrage_tests/media/generic_media_tests.py b/tests/sickrage_tests/media/generic_media_tests.py index e7ce73f0ce..a4cfa67623 100644 --- a/tests/sickrage_tests/media/generic_media_tests.py +++ b/tests/sickrage_tests/media/generic_media_tests.py @@ -32,6 +32,7 @@ import sickbeard +from six import iteritems from sickrage.media.GenericMedia import GenericMedia @@ -108,7 +109,7 @@ def test___init__(self): } for test in test_cases, 
unicode_test_cases: - for ((indexer_id, media_format), (expected_indexer_id, expected_media_format)) in test.iteritems(): + for ((indexer_id, media_format), (expected_indexer_id, expected_media_format)) in iteritems(test): generic_media = GenericMedia(indexer_id, media_format) self.assertEqual(generic_media.indexer_id, expected_indexer_id) diff --git a/tests/sickrage_tests/providers/generic_provider_tests.py b/tests/sickrage_tests/providers/generic_provider_tests.py index 353806a5b2..9a1e9413dd 100644 --- a/tests/sickrage_tests/providers/generic_provider_tests.py +++ b/tests/sickrage_tests/providers/generic_provider_tests.py @@ -32,6 +32,7 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) +from six import iteritems from sickrage.providers.GenericProvider import GenericProvider @@ -99,7 +100,7 @@ def test_get_id(self): } for test in test_cases, unicode_test_cases: - for (name, result) in test.iteritems(): + for (name, result) in iteritems(test): self.assertEqual(GenericProvider(name).get_id(), result) def test_image_name(self): @@ -161,7 +162,7 @@ def test_image_name(self): } for test in test_cases, unicode_test_cases: - for (name, result) in test.iteritems(): + for (name, result) in iteritems(test): self.assertEqual(GenericProvider(name).image_name(), result) def test_is_active(self): @@ -235,7 +236,7 @@ def test_make_id(self): } for test in test_cases, unicode_test_cases: - for (name, result) in test.iteritems(): + for (name, result) in iteritems(test): self.assertEqual(GenericProvider.make_id(name), result) def test_seed_ratio(self): @@ -282,7 +283,7 @@ def test_search(self): } for test in test_cases, unicode_test_cases: - for (search_params, result) in test.iteritems(): + for (search_params, result) in iteritems(test): self.assertEqual(GenericProvider('Test Provider').search(search_params), result) def test__get_size(self): diff --git a/tests/sickrage_tests/providers/nzb_provider_tests.py b/tests/sickrage_tests/providers/nzb_provider_tests.py index 711e65ac3f..123f4248d2 100644 --- a/tests/sickrage_tests/providers/nzb_provider_tests.py +++ b/tests/sickrage_tests/providers/nzb_provider_tests.py @@ -30,8 +30,8 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) +from six import iteritems import sickbeard - from generic_provider_tests import GenericProviderTests from sickrage.providers.GenericProvider import GenericProvider from sickrage.providers.nzb.NZBProvider import NZBProvider @@ -64,7 +64,7 @@ def test_is_active(self): (True, True): True, } - for ((use_nzb, enabled), result) in test_cases.iteritems(): + for ((use_nzb, enabled), result) in iteritems(test_cases): sickbeard.USE_NZBS = use_nzb provider = NZBProvider('Test Provider') diff --git a/tests/sickrage_tests/providers/torrent_provider_tests.py b/tests/sickrage_tests/providers/torrent_provider_tests.py index 7449252b37..17e837d25e 100644 --- a/tests/sickrage_tests/providers/torrent_provider_tests.py +++ b/tests/sickrage_tests/providers/torrent_provider_tests.py @@ -30,6 +30,7 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) +from six import iteritems import sickbeard from generic_provider_tests import GenericProviderTests @@ -64,7 
+65,7 @@ def test_is_active(self): (True, True): True, } - for ((use_torrents, enabled), result) in test_cases.iteritems(): + for ((use_torrents, enabled), result) in iteritems(test_cases): sickbeard.USE_TORRENTS = use_torrents provider = TorrentProvider('Test Provider') diff --git a/tests/sickrage_tests/show/coming_episodes_tests.py b/tests/sickrage_tests/show/coming_episodes_tests.py index 7e70604ce4..4eb5b5ef61 100644 --- a/tests/sickrage_tests/show/coming_episodes_tests.py +++ b/tests/sickrage_tests/show/coming_episodes_tests.py @@ -32,6 +32,7 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) +from six import iteritems from sickrage.show.ComingEpisodes import ComingEpisodes @@ -105,7 +106,7 @@ def test_get_sort(self): } for tests in test_cases, unicode_test_cases: - for (sort, result) in tests.iteritems(): + for (sort, result) in iteritems(tests): self.assertEqual(ComingEpisodes._get_sort(sort), result) # pylint: disable=protected-access diff --git a/tests/sickrage_tests/show/history_tests.py b/tests/sickrage_tests/show/history_tests.py index 5e0fd8047a..70ad41c0e7 100644 --- a/tests/sickrage_tests/show/history_tests.py +++ b/tests/sickrage_tests/show/history_tests.py @@ -30,6 +30,7 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) +from six import iteritems from sickbeard.common import Quality from sickrage.show.History import History @@ -62,7 +63,7 @@ def test_get_actions(self): } for tests in test_cases, unicode_test_cases: - for (action, result) in tests.iteritems(): + for (action, result) in iteritems(tests): self.assertEqual(History._get_actions(action), result) # pylint: disable=protected-access if __name__ == '__main__': diff --git a/tests/sickrage_tests/show/show_tests.py b/tests/sickrage_tests/show/show_tests.py index 804d711a77..ad03f32113 100644 --- a/tests/sickrage_tests/show/show_tests.py +++ b/tests/sickrage_tests/show/show_tests.py @@ -32,8 +32,8 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) +from six import iteritems import sickbeard - from sickbeard.common import Quality from sickbeard.tv import TVShow from sickrage.helper.exceptions import MultipleShowObjectsException @@ -82,7 +82,7 @@ def test_find(self): } for tests in test_cases, unicode_test_cases: - for ((use_shows, indexer_id), result) in tests.iteritems(): + for ((use_shows, indexer_id), result) in iteritems(tests): if use_shows: self.assertEqual(Show.find(shows, indexer_id), result) else: diff --git a/tests/sickrage_tests/system/restart_tests.py b/tests/sickrage_tests/system/restart_tests.py index 313ea97c21..5e41c6d560 100644 --- a/tests/sickrage_tests/system/restart_tests.py +++ b/tests/sickrage_tests/system/restart_tests.py @@ -30,6 +30,7 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) +from six import iteritems import sickbeard from sickbeard.event_queue import Events from sickrage.system.Restart import Restart @@ -62,7 +63,7 @@ def test_restart(self): } for tests in test_cases, unicode_test_cases: - for (pid, result) in tests.iteritems(): + for (pid, result) in 
iteritems(tests): self.assertEqual(Restart.restart(pid), result) diff --git a/tests/sickrage_tests/system/shutdown_tests.py b/tests/sickrage_tests/system/shutdown_tests.py index 9499dd5283..ab29c67bc9 100644 --- a/tests/sickrage_tests/system/shutdown_tests.py +++ b/tests/sickrage_tests/system/shutdown_tests.py @@ -30,6 +30,7 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib'))) sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) +from six import iteritems import sickbeard from sickbeard.event_queue import Events from sickrage.system.Shutdown import Shutdown @@ -62,7 +63,7 @@ def test_shutdown(self): } for tests in test_cases, unicode_test_cases: - for (pid, result) in tests.iteritems(): + for (pid, result) in iteritems(tests): self.assertEqual(Shutdown.stop(pid), result) From 2f85be481d42cf3c00f9ad500a07b23738dd9d29 Mon Sep 17 00:00:00 2001 From: labrys Date: Tue, 21 Jun 2016 03:50:16 -0400 Subject: [PATCH 104/134] PY3 has no basestring type --- sickbeard/metadata/kodi_12plus.py | 15 +++++++++------ sickbeard/metadata/mede8er.py | 4 +++- sickbeard/metadata/mediabrowser.py | 4 ++-- sickbeard/name_parser/parser.py | 16 +++++++++------- sickbeard/server/web/home/post_process.py | 4 +++- sickbeard/show_name_helpers.py | 13 +++++++------ sickbeard/subtitles.py | 4 ++-- 7 files changed, 35 insertions(+), 25 deletions(-) diff --git a/sickbeard/metadata/kodi_12plus.py b/sickbeard/metadata/kodi_12plus.py index 652fdd1249..86ff75bebc 100644 --- a/sickbeard/metadata/kodi_12plus.py +++ b/sickbeard/metadata/kodi_12plus.py @@ -21,9 +21,12 @@ import datetime from babelfish import Country +from six import string_types + import sickbeard from sickbeard import logger, helpers from sickbeard.metadata import generic + from sickrage.helper.common import dateFormat, episode_num from sickrage.helper.exceptions import ShowNotFoundException @@ -179,7 +182,7 @@ def _show_data(self, show_obj): indexer_id = etree.SubElement(tv_node, 'id') indexer_id.text = str(my_show['id']) - if getattr(my_show, 'genre', None) and isinstance(my_show['genre'], basestring): + if getattr(my_show, 'genre', None) and isinstance(my_show['genre'], string_types): for genre in self._split_info(my_show['genre']): cur_genre = etree.SubElement(tv_node, 'genre') cur_genre.text = genre @@ -202,12 +205,12 @@ def _show_data(self, show_obj): studio = etree.SubElement(tv_node, 'studio') studio.text = my_show['network'].strip() - if getattr(my_show, 'writer', None) and isinstance(my_show['writer'], basestring): + if getattr(my_show, 'writer', None) and isinstance(my_show['writer'], string_types): for writer in self._split_info(my_show['writer']): cur_writer = etree.SubElement(tv_node, 'credits') cur_writer.text = writer - if getattr(my_show, 'director', None) and isinstance(my_show['director'], basestring): + if getattr(my_show, 'director', None) and isinstance(my_show['director'], string_types): for director in self._split_info(my_show['director']): cur_director = etree.SubElement(tv_node, 'director') cur_director.text = director @@ -351,17 +354,17 @@ def _ep_data(self, ep_obj): rating = etree.SubElement(episode, 'rating') rating.text = my_ep['rating'] - if getattr(my_ep, 'writer', None) and isinstance(my_ep['writer'], basestring): + if getattr(my_ep, 'writer', None) and isinstance(my_ep['writer'], string_types): for writer in self._split_info(my_ep['writer']): cur_writer = etree.SubElement(episode, 'credits') cur_writer.text = writer - if getattr(my_ep, 'director', None) 
and isinstance(my_ep['director'], basestring): + if getattr(my_ep, 'director', None) and isinstance(my_ep['director'], string_types): for director in self._split_info(my_ep['director']): cur_director = etree.SubElement(episode, 'director') cur_director.text = director - if getattr(my_ep, 'gueststars', None) and isinstance(my_ep['gueststars'], basestring): + if getattr(my_ep, 'gueststars', None) and isinstance(my_ep['gueststars'], string_types): for actor in self._split_info(my_ep['gueststars']): cur_actor = etree.SubElement(episode, 'actor') cur_actor_name = etree.SubElement(cur_actor, 'name') diff --git a/sickbeard/metadata/mede8er.py b/sickbeard/metadata/mede8er.py index cf8204e247..0057490352 100644 --- a/sickbeard/metadata/mede8er.py +++ b/sickbeard/metadata/mede8er.py @@ -22,6 +22,8 @@ import os import datetime +from six import string_types + import sickbeard from sickbeard import logger, helpers from sickbeard.metadata import mediabrowser @@ -325,7 +327,7 @@ def _ep_data(self, ep_obj): if getattr(my_show, '_actors', None) or getattr(my_ep, 'gueststars', None): cast = etree.SubElement(episode, 'cast') - if getattr(my_ep, 'gueststars', None) and isinstance(my_ep['gueststars'], basestring): + if getattr(my_ep, 'gueststars', None) and isinstance(my_ep['gueststars'], string_types): for actor in (x.strip() for x in my_ep['gueststars'].split('|') if x.strip()): cur_actor = etree.SubElement(cast, 'actor') cur_actor.text = actor diff --git a/sickbeard/metadata/mediabrowser.py b/sickbeard/metadata/mediabrowser.py index cb292bf666..624a547734 100644 --- a/sickbeard/metadata/mediabrowser.py +++ b/sickbeard/metadata/mediabrowser.py @@ -21,7 +21,7 @@ import os import re -from six import iteritems +from six import iteritems, string_types import sickbeard from sickbeard import logger, helpers @@ -360,7 +360,7 @@ def _show_data(self, show_obj): zap2it_id = etree.SubElement(tv_node, 'Zap2ItId') zap2it_id.text = my_show['zap2it_id'] - if getattr(my_show, 'genre', None) and isinstance(my_show['genre'], basestring): + if getattr(my_show, 'genre', None) and isinstance(my_show['genre'], string_types): genres = etree.SubElement(tv_node, 'Genres') for genre in my_show['genre'].split('|'): if genre.strip(): diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py index ff74aefcff..d177082186 100644 --- a/sickbeard/name_parser/parser.py +++ b/sickbeard/name_parser/parser.py @@ -19,18 +19,20 @@ # along with SickRage. If not, see . 
import os -import time import re -import os.path +import time + +import dateutil +from six import string_types + import sickbeard from sickbeard.name_parser import regexes - from sickbeard import logger, helpers, scene_numbering, common, scene_exceptions, db +from sickbeard.helpers import remove_non_release_groups + from sickrage.helper.common import remove_extension from sickrage.helper.encoding import ek from sickrage.helper.exceptions import ex -from sickbeard.helpers import remove_non_release_groups -import dateutil class NameParser(object): @@ -106,7 +108,7 @@ def _parse_string(self, name): # pylint: disable=too-many-locals, too-many-bran matches = [] bestResult = None - + # Remove non release groups from filename name = remove_non_release_groups(name) @@ -362,7 +364,7 @@ def _combine_results(first, second, attr): @staticmethod def _unicodify(obj, encoding="utf-8"): - if isinstance(obj, basestring): + if isinstance(obj, string_types): if not isinstance(obj, unicode): obj = unicode(obj, encoding, 'replace') return obj diff --git a/sickbeard/server/web/home/post_process.py b/sickbeard/server/web/home/post_process.py index 9ee1f9af41..ffa3ab5f48 100644 --- a/sickbeard/server/web/home/post_process.py +++ b/sickbeard/server/web/home/post_process.py @@ -2,7 +2,9 @@ from __future__ import unicode_literals +from six import string_types from tornado.routes import route + from sickbeard import processTV from sickrage.helper.encoding import ss from sickbeard.server.web.core import PageTemplate @@ -26,7 +28,7 @@ def processEpisode(self, proc_dir=None, nzbName=None, jobName=None, quiet=None, nzb_name = nzbName def argToBool(argument): - if isinstance(argument, basestring): + if isinstance(argument, string_types): _arg = argument.strip().lower() else: _arg = argument diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py index f000173881..58cb884b4c 100644 --- a/sickbeard/show_name_helpers.py +++ b/sickbeard/show_name_helpers.py @@ -19,8 +19,10 @@ import fnmatch import os - import re +from collections import namedtuple + +from six import string_types import sickbeard from sickbeard import common @@ -28,7 +30,6 @@ from sickbeard import logger from sickrage.helper.encoding import ek from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException -from collections import namedtuple resultFilters = [ "(dir|sub|nfo)fix", @@ -52,14 +53,14 @@ def containsAtLeastOneWord(name, words): Returns: False if the name doesn't contain any word of words list, or the found word from the list. """ - if isinstance(words, basestring): + if isinstance(words, string_types): words = words.split(',') items = [(re.compile(r'(^|[\W_])%s($|[\W_])' % word.strip(), re.I), word.strip()) for word in words] for regexp, word in items: if regexp.search(name): # subs_words = '.dub.' or '.dksub.' or else subs_word = regexp.search(name).group(0) - # If word is a regex like "dub(bed)?" or "sub(bed|ed|pack|s)" + # If word is a regex like "dub(bed)?" 
or "sub(bed|ed|pack|s)" # then return just the matched word: "dub" and not full regex if word in resultFilters: return subs_word.replace(".","") @@ -183,7 +184,7 @@ def show_words(showObj): """ ShowWords = namedtuple('show_words', ['preferred_words', 'undesired_words', 'ignore_words', 'require_words']) - + preferred_words = ",".join(sickbeard.PREFERRED_WORDS.split(',')) if sickbeard.PREFERRED_WORDS.split(',') else '' undesired_words = ",".join(sickbeard.UNDESIRED_WORDS.split(',')) if sickbeard.UNDESIRED_WORDS.split(',') else '' @@ -201,5 +202,5 @@ def show_words(showObj): ignore_words = ",".join(final_ignore) require_words = ",".join(final_require) - + return ShowWords(preferred_words, undesired_words, ignore_words, require_words) diff --git a/sickbeard/subtitles.py b/sickbeard/subtitles.py index 2a137ff96c..24b64e6060 100644 --- a/sickbeard/subtitles.py +++ b/sickbeard/subtitles.py @@ -29,7 +29,7 @@ import traceback from babelfish import Language, language_converters -from six import iteritems +from six import iteritems, string_types from dogpile.cache.api import NO_VALUE from subliminal import (compute_score, ProviderPool, provider_manager, refine, refiner_manager, region, save_subtitles, scan_video) @@ -162,7 +162,7 @@ def needs_subtitles(subtitles): if not wanted: return False - if isinstance(subtitles, basestring): + if isinstance(subtitles, string_types): subtitles = {subtitle.strip() for subtitle in subtitles.split(',') if subtitle.strip()} if sickbeard.SUBTITLES_MULTI: From a6c9c37e2729b9db83a5c70cf4978a3a22a375ec Mon Sep 17 00:00:00 2001 From: labrys Date: Tue, 21 Jun 2016 03:55:59 -0400 Subject: [PATCH 105/134] PY3 has no unicode method --- sickbeard/db.py | 8 +++-- sickbeard/helpers.py | 16 ++++----- sickbeard/logger.py | 6 ++-- sickbeard/metadata/generic.py | 16 ++++----- sickbeard/name_parser/parser.py | 6 ++-- sickbeard/notifiers/kodi.py | 3 +- sickbeard/postProcessor.py | 28 +++++++-------- sickbeard/providers/freshontv.py | 7 ++-- sickbeard/server/api/core.py | 8 ++--- sickbeard/subtitles.py | 6 ++-- sickbeard/tv.py | 59 ++++++++++++++++---------------- sickbeard/tvcache.py | 14 ++++---- sickrage/helper/common.py | 21 +++++++----- sickrage/helper/encoding.py | 20 ++++++----- sickrage/helper/exceptions.py | 8 +++-- sickrage/show/History.py | 4 ++- tests/encoding_tests.py | 3 +- 17 files changed, 123 insertions(+), 110 deletions(-) diff --git a/sickbeard/db.py b/sickbeard/db.py index b53a17e301..ac15c68a0b 100644 --- a/sickbeard/db.py +++ b/sickbeard/db.py @@ -24,8 +24,10 @@ import sqlite3 import time import threading -import sickbeard +from six import text_type + +import sickbeard from sickbeard import logger from sickrage.helper.encoding import ek from sickrage.helper.exceptions import ex @@ -350,9 +352,9 @@ def _unicode_text_factory(x): """ try: # Just revert to the old code for now, until we can fix unicode - return unicode(x, 'utf-8') + return text_type(x, 'utf-8') except: - return unicode(x, sickbeard.SYS_ENCODING, errors="ignore") + return text_type(x, sickbeard.SYS_ENCODING, errors="ignore") @staticmethod def _dict_factory(cursor, row): diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index 1680b5a3f9..44918461dd 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -50,7 +50,7 @@ import requests from requests.compat import urlparse import shutil_custom -from six import iteritems +from six import iteritems, text_type from six.moves import http_client import sickbeard @@ -413,7 +413,7 @@ def link(src, dst): """ if os.name == 'nt': - if 
ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0: + if ctypes.windll.kernel32.CreateHardLinkW(text_type(dst), text_type(src), 0) == 0: raise ctypes.WinError() else: ek(os.link, src, dst) @@ -445,7 +445,7 @@ def symlink(src, dst): """ if os.name == 'nt': - if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if ek(os.path.isdir, src) else 0) in [0, 1280]: + if ctypes.windll.kernel32.CreateSymbolicLinkW(text_type(dst), text_type(src), 1 if ek(os.path.isdir, src) else 0) in [0, 1280]: raise ctypes.WinError() else: ek(os.symlink, src, dst) @@ -837,8 +837,8 @@ def create_https_certificates(ssl_cert, ssl_key): :return: True on success, False on failure """ - # assert isinstance(ssl_key, unicode) - # assert isinstance(ssl_cert, unicode) + # assert isinstance(ssl_key, text_type) + # assert isinstance(ssl_cert, text_type) try: from OpenSSL import crypto # @UnresolvedImport @@ -1111,7 +1111,7 @@ def is_hidden(filepath): def has_hidden_attribute(filepath): try: - attrs = ctypes.windll.kernel32.GetFileAttributesW(unicode(filepath)) + attrs = ctypes.windll.kernel32.GetFileAttributesW(text_type(filepath)) assert attrs != -1 result = bool(attrs & 2) except (AttributeError, AssertionError): @@ -1422,12 +1422,12 @@ def getURL(url, post_data=None, params=None, headers=None, # pylint:disable=too if params and isinstance(params, (list, dict)): for param in params: - if isinstance(params[param], unicode): + if isinstance(params[param], text_type): params[param] = params[param].encode('utf-8') if post_data and isinstance(post_data, (list, dict)): for param in post_data: - if isinstance(post_data[param], unicode): + if isinstance(post_data[param], text_type): post_data[param] = post_data[param].encode('utf-8') resp = session.request( diff --git a/sickbeard/logger.py b/sickbeard/logger.py index d6e25e6f40..ebe3ce1b49 100644 --- a/sickbeard/logger.py +++ b/sickbeard/logger.py @@ -40,7 +40,7 @@ import subliminal from requests.compat import quote -from six import itervalues +from six import itervalues, text_type from github import Github, InputFileContent # pylint: disable=import-error import sickbeard @@ -158,7 +158,7 @@ def format(self, record): else: msg = super(CensoredFormatter, self).format(record) - if not isinstance(msg, unicode): + if not isinstance(msg, text_type): msg = msg.decode(self.encoding, 'replace') # Convert to unicode # Change the SSL error to a warning with a link to information about how to fix it. 
@@ -177,7 +177,7 @@ def format(self, record): # convert set items to unicode and typecast to list censored = list({ item.decode(self.encoding, 'replace') - if not isinstance(item, unicode) else item + if not isinstance(item, text_type) else item for item in censored }) # sort the list in order of descending length so that entire item is censored diff --git a/sickbeard/metadata/generic.py b/sickbeard/metadata/generic.py index 4e2a74bef4..3fada7a9f6 100644 --- a/sickbeard/metadata/generic.py +++ b/sickbeard/metadata/generic.py @@ -22,7 +22,7 @@ import io import re -from six import iterkeys +from six import iterkeys, text_type from tmdb_api.tmdb_api import TMDB import sickbeard @@ -117,7 +117,7 @@ def set_config(self, string): @staticmethod def _check_exists(location): if location: - assert isinstance(location, unicode) + assert isinstance(location, text_type) result = ek(os.path.isfile, location) logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG) return result @@ -174,7 +174,7 @@ def get_episode_thumb_path(ep_obj): Returns the path where the episode thumbnail should be stored. ep_obj: a TVEpisode instance for which to create the thumbnail """ - assert isinstance(ep_obj.location, unicode) + assert isinstance(ep_obj.location, text_type) if ek(os.path.isfile, ep_obj.location): tbn_filename = ep_obj.location.rpartition(".") @@ -266,7 +266,7 @@ def update_show_indexer_metadata(self, show_obj): logger.DEBUG) nfo_file_path = self.get_show_file_path(show_obj) - assert isinstance(nfo_file_path, unicode) + assert isinstance(nfo_file_path, text_type) try: with io.open(nfo_file_path, 'rb') as xmlFileObj: @@ -398,7 +398,7 @@ def write_show_file(self, show_obj): return False nfo_file_path = self.get_show_file_path(show_obj) - assert isinstance(nfo_file_path, unicode) + assert isinstance(nfo_file_path, text_type) nfo_file_dir = ek(os.path.dirname, nfo_file_path) @@ -444,7 +444,7 @@ def write_ep_file(self, ep_obj): return False nfo_file_path = self.get_episode_file_path(ep_obj) - assert isinstance(nfo_file_path, unicode) + assert isinstance(nfo_file_path, text_type) nfo_file_dir = ek(os.path.dirname, nfo_file_path) try: @@ -683,7 +683,7 @@ def _write_image(self, image_data, image_path, obj=None): image_path: file location to save the image to """ - assert isinstance(image_path, unicode) + assert isinstance(image_path, text_type) # don't bother overwriting it if ek(os.path.isfile, image_path): @@ -894,7 +894,7 @@ def retrieveShowMetadata(self, folder): empty_return = (None, None, None) - assert isinstance(folder, unicode) + assert isinstance(folder, text_type) metadata_path = ek(os.path.join, folder, self._show_metadata_filename) diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py index d177082186..cd10af84bf 100644 --- a/sickbeard/name_parser/parser.py +++ b/sickbeard/name_parser/parser.py @@ -23,7 +23,7 @@ import time import dateutil -from six import string_types +from six import string_types, text_type import sickbeard from sickbeard.name_parser import regexes @@ -365,8 +365,8 @@ def _combine_results(first, second, attr): @staticmethod def _unicodify(obj, encoding="utf-8"): if isinstance(obj, string_types): - if not isinstance(obj, unicode): - obj = unicode(obj, encoding, 'replace') + if not isinstance(obj, text_type): + obj = text_type(obj, encoding, 'replace') return obj @staticmethod diff --git a/sickbeard/notifiers/kodi.py b/sickbeard/notifiers/kodi.py index 02ca6d2b30..744b92d5d0 100644 --- a/sickbeard/notifiers/kodi.py +++ 
b/sickbeard/notifiers/kodi.py @@ -24,6 +24,7 @@ import time from requests.compat import urlencode, unquote, unquote_plus, quote +from six import text_type from six.moves.urllib.request import Request, urlopen from six.moves.urllib.error import URLError from six.moves.http_client import BadStatusLine @@ -215,7 +216,7 @@ def _send_to_kodi(command, host=None, username=None, password=None, dest_app="KO return False for key in command: - if isinstance(command[key], unicode): + if isinstance(command[key], text_type): command[key] = command[key].encode('utf-8') enc_command = urlencode(command) diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py index 631c2cd92c..9531df3842 100644 --- a/sickbeard/postProcessor.py +++ b/sickbeard/postProcessor.py @@ -26,26 +26,22 @@ import subprocess import stat -import sickbeard +import adba +from babelfish import language_converters +from six import text_type -from sickbeard import db -from sickbeard import common -from sickbeard import helpers -from sickbeard import history -from sickbeard import logger -from sickbeard import notifiers -from sickbeard import show_name_helpers -from sickbeard import failed_history +import sickbeard +from sickbeard import ( + db, common, helpers, history, logger, notifiers, show_name_helpers, failed_history, +) +from sickbeard.helpers import verify_freespace from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException + from sickrage.helper.common import remove_extension, replace_extension, subtitle_extensions from sickrage.helper.encoding import ek from sickrage.helper.exceptions import EpisodeNotFoundException, EpisodePostProcessingFailedException, ex from sickrage.helper.exceptions import ShowDirectoryNotFoundException from sickrage.show.Show import Show -from babelfish import language_converters - -import adba -from sickbeard.helpers import verify_freespace class PostProcessor(object): # pylint: disable=too-many-instance-attributes @@ -846,7 +842,7 @@ def _run_extra_scripts(self, ep_obj): return file_path = self.file_path - if isinstance(file_path, unicode): + if isinstance(file_path, text_type): try: file_path = file_path.encode(sickbeard.SYS_ENCODING) except UnicodeEncodeError: @@ -854,7 +850,7 @@ def _run_extra_scripts(self, ep_obj): pass ep_location = ep_obj.location - if isinstance(ep_location, unicode): + if isinstance(ep_location, text_type): try: ep_location = ep_location.encode(sickbeard.SYS_ENCODING) except UnicodeEncodeError: @@ -862,7 +858,7 @@ def _run_extra_scripts(self, ep_obj): pass for curScriptName in sickbeard.EXTRA_SCRIPTS: - if isinstance(curScriptName, unicode): + if isinstance(curScriptName, text_type): try: curScriptName = curScriptName.encode(sickbeard.SYS_ENCODING) except UnicodeEncodeError: diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 6dd3335295..ba412d0750 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -24,6 +24,7 @@ from requests.compat import urljoin from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar +from six import text_type from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser @@ -130,7 +131,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for i in range(1, max_page_number): time.sleep(1) - page_search_url = search_url + '&page=' + unicode(i) + page_search_url = search_url + '&page=' + text_type(i) # '.log('Search string: ' + page_search_url, logger.DEBUG) page_html = 
self.get_url(page_search_url, returns='text') @@ -159,7 +160,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man title = individual_torrent.find('a', class_='torrent_name_link')['title'] details_url = individual_torrent.find('a', class_='torrent_name_link')['href'] torrent_id = int((re.match('.*?([0-9]+)$', details_url).group(1)).strip()) - download_url = self.urls['download'] % (unicode(torrent_id)) + download_url = self.urls['download'] % (text_type(torrent_id)) if not all([title, download_url]): continue @@ -175,7 +176,7 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man continue torrent_size = individual_torrent.find('td', class_='table_size').get_text(strip=True) - torrent_size = re.split('(\d+.?\d+)', unicode(torrent_size), 1) + torrent_size = re.split('(\d+.?\d+)', text_type(torrent_size), 1) torrent_size = '{0} {1}'.format(torrent_size[1], torrent_size[2]) size = convert_size(torrent_size) or -1 diff --git a/sickbeard/server/api/core.py b/sickbeard/server/api/core.py index d9a6432d2a..9b4e567905 100644 --- a/sickbeard/server/api/core.py +++ b/sickbeard/server/api/core.py @@ -33,7 +33,7 @@ import traceback from requests.compat import unquote_plus -from six import iteritems +from six import iteritems, text_type from tornado.web import RequestHandler import sickbeard @@ -1401,7 +1401,7 @@ def run(self): root_dirs_new = [unquote_plus(x) for x in root_dirs] root_dirs_new.insert(0, index) - root_dirs_new = '|'.join(unicode(x) for x in root_dirs_new) + root_dirs_new = '|'.join(text_type(x) for x in root_dirs_new) sickbeard.ROOT_DIRS = root_dirs_new return _responds(RESULT_SUCCESS, _get_root_dirs(), msg="Root directories updated") @@ -1504,7 +1504,7 @@ def run(self): root_dirs_new = [unquote_plus(x) for x in root_dirs_new] if root_dirs_new: root_dirs_new.insert(0, new_index) - root_dirs_new = "|".join(unicode(x) for x in root_dirs_new) + root_dirs_new = "|".join(text_type(x) for x in root_dirs_new) sickbeard.ROOT_DIRS = root_dirs_new # what if the root dir was not found? 
@@ -1703,7 +1703,7 @@ def run(self): # found show results = [{indexer_ids[_indexer]: int(my_show.data['id']), - "name": unicode(my_show.data['seriesname']), + "name": text_type(my_show.data['seriesname']), "first_aired": my_show.data['firstaired'], "indexer": int(_indexer)}] break diff --git a/sickbeard/subtitles.py b/sickbeard/subtitles.py index 24b64e6060..c30abd9285 100644 --- a/sickbeard/subtitles.py +++ b/sickbeard/subtitles.py @@ -29,7 +29,7 @@ import traceback from babelfish import Language, language_converters -from six import iteritems, string_types +from six import iteritems, string_types, text_type from dogpile.cache.api import NO_VALUE from subliminal import (compute_score, ProviderPool, provider_manager, refine, refiner_manager, region, save_subtitles, scan_video) @@ -509,7 +509,7 @@ def get_subtitle_description(subtitle): :rtype: str """ desc = None - sub_id = unicode(subtitle.id) + sub_id = text_type(subtitle.id) if hasattr(subtitle, 'filename') and subtitle.filename: desc = subtitle.filename.lower() elif hasattr(subtitle, 'name') and subtitle.name: @@ -517,7 +517,7 @@ def get_subtitle_description(subtitle): if hasattr(subtitle, 'release') and subtitle.release: desc = subtitle.release.lower() if hasattr(subtitle, 'releases') and subtitle.releases: - desc = unicode(subtitle.releases).lower() + desc = text_type(subtitle.releases).lower() if not desc: desc = sub_id diff --git a/sickbeard/tv.py b/sickbeard/tv.py index 223942e6d0..525e68355a 100644 --- a/sickbeard/tv.py +++ b/sickbeard/tv.py @@ -25,45 +25,46 @@ import glob import stat import traceback - -try: - import xml.etree.cElementTree as etree -except ImportError: - import xml.etree.ElementTree as etree - -try: - from send2trash import send2trash -except ImportError: - pass +import shutil from imdb import imdb +import shutil_custom +from six import text_type import sickbeard -from sickbeard import db -from sickbeard import helpers, logger -from sickbeard import image_cache -from sickbeard import notifiers -from sickbeard import postProcessor -from sickbeard import subtitles +from sickbeard import ( + db, helpers, logger, image_cache, notifiers, postProcessor, subtitles, network_timezones, +) from sickbeard.blackandwhitelist import BlackAndWhiteList -from sickbeard import network_timezones +from sickbeard.common import ( + Quality, Overview, statusStrings, + DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED, UNKNOWN, + NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, NAMING_LIMITED_EXTEND_E_PREFIXED, +) from sickbeard.indexers.indexer_config import INDEXER_TVRAGE from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException -from sickrage.helper.common import dateTimeFormat, remove_extension, replace_extension, sanitize_filename, try_int, episode_num + +from sickrage.helper.common import ( + dateTimeFormat, remove_extension, replace_extension, sanitize_filename, try_int, episode_num, +) from sickrage.helper.encoding import ek -from sickrage.helper.exceptions import EpisodeDeletedException, EpisodeNotFoundException, ex -from sickrage.helper.exceptions import MultipleEpisodesInDatabaseException, MultipleShowsInDatabaseException -from sickrage.helper.exceptions import MultipleShowObjectsException, NoNFOException, ShowDirectoryNotFoundException -from sickrage.helper.exceptions import ShowNotFoundException +from sickrage.helper.exceptions import ( + EpisodeDeletedException, EpisodeNotFoundException, ex, + 
MultipleEpisodesInDatabaseException, MultipleShowsInDatabaseException, MultipleShowObjectsException, + NoNFOException, ShowDirectoryNotFoundException, ShowNotFoundException, +) from sickrage.show.Show import Show -from sickbeard.common import Quality, Overview, statusStrings -from sickbeard.common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED, UNKNOWN -from sickbeard.common import NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, \ - NAMING_LIMITED_EXTEND_E_PREFIXED -import shutil -import shutil_custom +try: + import xml.etree.cElementTree as etree +except ImportError: + import xml.etree.ElementTree as etree + +try: + from send2trash import send2trash +except ImportError: + pass shutil.copyfile = shutil_custom.copyfile_custom @@ -495,7 +496,7 @@ def loadEpisodesFromDB(self): # pylint: disable=too-many-locals curSeason = int(curResult["season"]) curEpisode = int(curResult["episode"]) curShowid = int(curResult['showid']) - curShowName = unicode(curResult['show_name']) + curShowName = text_type(curResult['show_name']) logger.log(u"%s: Loading %s episodes from DB" % (curShowid, curShowName), logger.DEBUG) deleteEp = False diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index a0f7267809..b09b72d5a2 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -23,14 +23,16 @@ import datetime import itertools import traceback + +from six import text_type + import sickbeard -from sickbeard import db -from sickbeard import logger +from sickbeard import db, logger, show_name_helpers +from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException from sickbeard.rssfeeds import getFeed -from sickbeard import show_name_helpers + from sickrage.helper.exceptions import AuthException, ex from sickrage.show.Show import Show -from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException class CacheDBConnection(db.DBConnection): @@ -217,7 +219,7 @@ def updateCache(self): if cl: cache_db_con.mass_action(cl) - # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 + # finished processing, let's save the newest x (index) items and store these in cache with a max of 5 # (overwritable per provider, throug hthe max_recent_items attribute. 
self.provider.recent_results = data['entries'][0:min(index, self.provider.max_recent_items)] @@ -376,7 +378,7 @@ def _addCacheEntry(self, name, url, seeders, leechers, size, pubdate, torrent_ha # get quality of release quality = parse_result.quality - assert isinstance(name, unicode) + assert isinstance(name, text_type) # get release group release_group = parse_result.release_group diff --git a/sickrage/helper/common.py b/sickrage/helper/common.py index 84c854d8f8..4361d24d10 100644 --- a/sickrage/helper/common.py +++ b/sickrage/helper/common.py @@ -20,11 +20,14 @@ from __future__ import unicode_literals import re -import sickbeard from fnmatch import fnmatch import logging import traceback +from six import text_type + +import sickbeard + logger = logging.getLogger(__name__) dateFormat = '%Y-%m-%d' @@ -137,7 +140,7 @@ def is_sync_file(filename): :return: ``True`` if the ``filename`` is a sync file, ``False`` otherwise """ - if isinstance(filename, (str, unicode)): + if isinstance(filename, (str, text_type)): extension = filename.rpartition('.')[2].lower() return extension in sickbeard.SYNC_FILES.split(',') or \ @@ -154,7 +157,7 @@ def is_torrent_or_nzb_file(filename): :return: ``True`` if the ``filename`` is a NZB file or a torrent file, ``False`` otherwise """ - if not isinstance(filename, (str, unicode)): + if not isinstance(filename, (str, text_type)): return False return filename.rpartition('.')[2].lower() in ['nzb', 'torrent'] @@ -247,7 +250,7 @@ def remove_extension(filename): :return: The ``filename`` without its extension. """ - if isinstance(filename, (str, unicode)) and '.' in filename: + if isinstance(filename, (str, text_type)) and '.' in filename: basename, _, extension = filename.rpartition('.') if basename and extension.lower() in ['nzb', 'torrent'] + media_extensions: @@ -264,7 +267,7 @@ def replace_extension(filename, new_extension): :return: The ``filename`` with the new extension """ - if isinstance(filename, (str, unicode)) and '.' in filename: + if isinstance(filename, (str, text_type)) and '.' in filename: basename, _, _ = filename.rpartition('.') if basename: @@ -280,7 +283,7 @@ def sanitize_filename(filename): :return: The ``filename``cleaned """ - if isinstance(filename, (str, unicode)): + if isinstance(filename, (str, text_type)): filename = re.sub(r'[\\/\*]', '-', filename) filename = re.sub(r'[:"<>|?]', '', filename) filename = re.sub(r'™', '', filename) # Trade Mark Sign unicode: \u2122 @@ -327,7 +330,7 @@ def episode_num(season=None, episode=None, **kwargs): return '{0:0>3}'.format(season or episode) def enabled_providers(search_type): - """ + """ Return providers based on search type: daily, backlog and manualsearch """ return [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) @@ -350,5 +353,5 @@ def remove_strings(old_string, unwanted_strings): for item in unwanted_strings: old_string = old_string.replace(item, '') return old_string - - + + diff --git a/sickrage/helper/encoding.py b/sickrage/helper/encoding.py index 1ef64e70b8..31b157226d 100644 --- a/sickrage/helper/encoding.py +++ b/sickrage/helper/encoding.py @@ -18,11 +18,13 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . 
-import sickbeard - from chardet import detect from os import name +from six import text_type + +import sickbeard + def ek(function, *args, **kwargs): """ @@ -37,7 +39,7 @@ def ek(function, *args, **kwargs): if name == 'nt': result = function(*args, **kwargs) else: - result = function(*[ss(x) if isinstance(x, (str, unicode)) else x for x in args], **kwargs) + result = function(*[ss(x) if isinstance(x, (str, text_type)) else x for x in args], **kwargs) if isinstance(result, (list, tuple)): return _fix_list_encoding(result) @@ -96,21 +98,21 @@ def _to_unicode(var): if isinstance(var, str): try: - var = unicode(var) + var = text_type(var) except Exception: try: - var = unicode(var, 'utf-8') + var = text_type(var, 'utf-8') except Exception: try: - var = unicode(var, 'latin-1') + var = text_type(var, 'latin-1') except Exception: try: - var = unicode(var, sickbeard.SYS_ENCODING) + var = text_type(var, sickbeard.SYS_ENCODING) except Exception: try: # Chardet can be wrong, so try it last - var = unicode(var, detect(var).get('encoding')) + var = text_type(var, detect(var).get('encoding')) except Exception: - var = unicode(var, sickbeard.SYS_ENCODING, 'replace') + var = text_type(var, sickbeard.SYS_ENCODING, 'replace') return var diff --git a/sickrage/helper/exceptions.py b/sickrage/helper/exceptions.py index 2c2874e5cd..85038a9a05 100644 --- a/sickrage/helper/exceptions.py +++ b/sickrage/helper/exceptions.py @@ -17,6 +17,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from six import text_type + from sickrage.helper.encoding import ss @@ -33,7 +35,7 @@ def ex(e): for arg in e.args: if arg is not None: - if isinstance(arg, (str, unicode)): + if isinstance(arg, (str, text_type)): fixed_arg = ss(arg) else: try: @@ -49,8 +51,8 @@ def ex(e): message = u'{} : {}'.format(message, fixed_arg) except UnicodeError: message = u'{} : {}'.format( - unicode(message, errors='replace'), - unicode(fixed_arg, errors='replace')) + text_type(message, errors='replace'), + text_type(fixed_arg, errors='replace')) return message diff --git a/sickrage/show/History.py b/sickrage/show/History.py index e0beda71fd..73b7766d27 100644 --- a/sickrage/show/History.py +++ b/sickrage/show/History.py @@ -19,6 +19,8 @@ from collections import namedtuple from datetime import datetime, timedelta +from six import text_type + from sickbeard.common import Quality from sickbeard.db import DBConnection @@ -102,7 +104,7 @@ def trim(self, days=30): @staticmethod def _get_actions(action): - action = action.lower() if isinstance(action, (str, unicode)) else '' + action = action.lower() if isinstance(action, (str, text_type)) else '' result = None if action == 'downloaded': diff --git a/tests/encoding_tests.py b/tests/encoding_tests.py index 626ea831d6..5b0cf9cf41 100644 --- a/tests/encoding_tests.py +++ b/tests/encoding_tests.py @@ -14,6 +14,7 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) +from six import text_type import sickbeard from sickbeard import ek, ex from sickrage.helper.common import sanitize_filename @@ -45,7 +46,7 @@ def test_encoding(self): for test in strings: try: show_dir = ek(os.path.join, root_dir, sanitize_filename(test)) - self.assertTrue(isinstance(show_dir, unicode)) + self.assertTrue(isinstance(show_dir, text_type)) except Exception as error: # pylint: disable=broad-except ex(error) From 14de8557898d577eec6e925d82d36fa6b3349870 Mon Sep 
17 00:00:00 2001 From: labrys Date: Tue, 21 Jun 2016 04:45:23 -0400 Subject: [PATCH 106/134] PY3 does not support exception arguments delimited by a comma --- sickbeard/name_parser/parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py index cd10af84bf..61a82d3123 100644 --- a/sickbeard/name_parser/parser.py +++ b/sickbeard/name_parser/parser.py @@ -97,8 +97,8 @@ def _compile_regexes(self, regexMode): for cur_pattern_num, (cur_pattern_name, cur_pattern) in enumerate(regexItem): try: cur_regex = re.compile(cur_pattern, re.VERBOSE | re.IGNORECASE) - except re.error, errormsg: - logger.log(u"WARNING: Invalid episode_pattern using %s regexs, %s. %s" % (dbg_str, errormsg, cur_pattern)) + except re.error as msg: + logger.log(u"WARNING: Invalid episode_pattern using %s regexs, %s. %s" % (dbg_str, msg, cur_pattern)) else: self.compiled_regexes.append((cur_pattern_num, cur_pattern_name, cur_regex)) From 16424d680c72821cfeb86ab3b71d2a24b4bc57f7 Mon Sep 17 00:00:00 2001 From: labrys Date: Tue, 21 Jun 2016 04:47:38 -0400 Subject: [PATCH 107/134] PY3 does not have module urlparse --- sickbeard/providers/tntvillage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 6b5264be72..9268eea981 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -22,7 +22,7 @@ import re import traceback -from urlparse import parse_qs +from six.moves.urllib_parse import parse_qs from requests.utils import dict_from_cookiejar from requests.compat import urljoin From 4b46a8f6e56b449a5d759ef6b484bb1f298fe807 Mon Sep 17 00:00:00 2001 From: Labrys Date: Tue, 21 Jun 2016 22:35:29 -0400 Subject: [PATCH 108/134] PY3 print is a function --- tests/common_tests.py | 10 +++-- tests/db_tests.py | 12 ++++-- tests/encoding_tests.py | 12 ++++-- tests/feedparser_tests.py | 12 ++++-- tests/issue_submitter_tests.py | 12 ++++-- tests/name_parser_tests.py | 37 +++++++++---------- tests/notifier_tests.py | 13 ++++--- tests/numdict_tests.py | 2 + tests/pp_tests.py | 14 ++++--- tests/search_tests.py | 28 ++++++++------ tests/sickrage_tests/helper/__init__.py | 2 + tests/sickrage_tests/media/__init__.py | 2 + .../sickrage_tests/media/show_banner_tests.py | 2 + .../media/show_fan_art_tests.py | 2 + .../media/show_network_logo_tests.py | 2 + .../sickrage_tests/media/show_poster_tests.py | 2 + tests/sickrage_tests/providers/__init__.py | 2 + tests/sickrage_tests/show/__init__.py | 2 + tests/sickrage_tests/system/__init__.py | 2 + tests/snatch_tests.py | 11 ++++-- tests/ssl_sni_tests.py | 20 ++++++---- tests/test_lib.py | 8 ++-- tests/torrent_tests.py | 16 +++++--- tests/tv_tests.py | 16 +++++--- 24 files changed, 154 insertions(+), 87 deletions(-) diff --git a/tests/common_tests.py b/tests/common_tests.py index f6a3a722fb..0cd8582917 100644 --- a/tests/common_tests.py +++ b/tests/common_tests.py @@ -27,6 +27,8 @@ # TODO: Implement skipped tests +from __future__ import print_function + import os.path import sys import unittest @@ -474,9 +476,11 @@ def test_overview_strings(self): self.assertEqual(overview.overviewStrings[overview.SNATCHED], "snatched") if __name__ == '__main__': - print "=======================" - print "STARTING - COMMON TESTS" - print "=======================" + print(""" + ======================= + STARTING - COMMON TESTS + ======================= + """) SUITE = 
unittest.TestLoader().loadTestsFromTestCase(QualityStringTests) unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/db_tests.py b/tests/db_tests.py index f9ebf89812..ee32a9c825 100644 --- a/tests/db_tests.py +++ b/tests/db_tests.py @@ -25,6 +25,8 @@ DBMultiTests """ +from __future__ import print_function + import os.path import sys import threading @@ -87,10 +89,12 @@ def test_threaded(self): thread.start() if __name__ == '__main__': - print "==================" - print "STARTING - DB TESTS" - print "==================" - print "######################################################################" + print(""" + ================== + STARTING - DB TESTS + ================== + ###################################################################### + """) SUITE = unittest.TestLoader().loadTestsFromTestCase(DBBasicTests) unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/encoding_tests.py b/tests/encoding_tests.py index 5b0cf9cf41..849dedecfa 100644 --- a/tests/encoding_tests.py +++ b/tests/encoding_tests.py @@ -1,5 +1,7 @@ # coding=utf-8 +from __future__ import print_function + """ Test encoding """ @@ -51,10 +53,12 @@ def test_encoding(self): ex(error) if __name__ == "__main__": - print "==================" - print "STARTING - ENCODING TESTS" - print "==================" - print "######################################################################" + print(""" + ================== + STARTING - ENCODING TESTS + ================== + ###################################################################### + """) SUITE = unittest.TestLoader().loadTestsFromTestCase(EncodingTests) unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/feedparser_tests.py b/tests/feedparser_tests.py index 09f615e4e8..5cca764782 100644 --- a/tests/feedparser_tests.py +++ b/tests/feedparser_tests.py @@ -1,5 +1,7 @@ # coding=utf-8 +from __future__ import print_function + """ Test Feed Parser """ @@ -30,10 +32,12 @@ def test_womble(self): self.assertTrue(title and url) if __name__ == "__main__": - print "==================" - print "STARTING - FEED PARSER TESTS" - print "==================" - print "######################################################################" + print(""" + ================== + STARTING - FEED PARSER TESTS + ================== + ###################################################################### + """) SUITE = unittest.TestLoader().loadTestsFromTestCase(FeedParserTests) TEST_RESULTS = unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/issue_submitter_tests.py b/tests/issue_submitter_tests.py index 283584d8c8..a58623e2f9 100644 --- a/tests/issue_submitter_tests.py +++ b/tests/issue_submitter_tests.py @@ -17,6 +17,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . 
+from __future__ import print_function + """ Test exception logging """ @@ -55,10 +57,12 @@ def test_submitter(self): if __name__ == "__main__": - print "==================" - print "STARTING - ISSUE SUBMITTER TESTS" - print "==================" - print "######################################################################" + print(""" + ================== + STARTING - ISSUE SUBMITTER TESTS + ================== + ###################################################################### + """) SUITE = unittest.TestLoader().loadTestsFromTestCase(IssueSubmitterBasicTests) unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/name_parser_tests.py b/tests/name_parser_tests.py index cddd8d326d..86ad4735b9 100644 --- a/tests/name_parser_tests.py +++ b/tests/name_parser_tests.py @@ -5,6 +5,8 @@ # pylint: disable=line-too-long +from __future__ import print_function + import datetime import os.path import sys @@ -218,7 +220,7 @@ def _test_name(name): return True if VERBOSE: - print 'Actual: ', parse_result.which_regex, parse_result + print('Actual: ', parse_result.which_regex, parse_result) return False def test_failures(self): @@ -245,8 +247,7 @@ def _test_combo(self, name, result, which_regexes): """ if VERBOSE: - print - print 'Testing', name + print('\nTesting', name) name_parser = parser.NameParser(True) @@ -256,8 +257,8 @@ def _test_combo(self, name, result, which_regexes): return False if DEBUG: - print test_result, test_result.which_regex - print result, which_regexes + print(test_result, test_result.which_regex) + print(result, which_regexes) self.assertEqual(test_result, result) for cur_regex in which_regexes: @@ -295,8 +296,7 @@ def _test_names(self, name_parser, section, transform=None, verbose=False): """ if VERBOSE or verbose: - print - print 'Running', section, 'tests' + print('\nRunning', section, 'tests') for cur_test_base in SIMPLE_TEST_CASES[section]: if transform: cur_test = transform(cur_test_base) @@ -304,7 +304,7 @@ def _test_names(self, name_parser, section, transform=None, verbose=False): else: cur_test = cur_test_base if VERBOSE or verbose: - print 'Testing', cur_test + print('Testing', cur_test) result = SIMPLE_TEST_CASES[section][cur_test_base] @@ -318,10 +318,10 @@ def _test_names(self, name_parser, section, transform=None, verbose=False): test_result = name_parser.parse(cur_test) if DEBUG or verbose: - print 'air_by_date:', test_result.is_air_by_date, 'air_date:', test_result.air_date - print 'anime:', test_result.is_anime, 'ab_episode_numbers:', test_result.ab_episode_numbers - print test_result - print result + print('air_by_date:', test_result.is_air_by_date, 'air_date:', test_result.air_date) + print('anime:', test_result.is_anime, 'ab_episode_numbers:', test_result.ab_episode_numbers) + print(test_result) + print(result) self.assertEqual(test_result.which_regex, [section], '%s : %s != %s' % (cur_test, test_result.which_regex, [section])) self.assertEqual(str(test_result), str(result), '%s : %s != %s' % (cur_test, str(test_result), str(result))) @@ -459,8 +459,7 @@ def _test_names(self, name_parser, section, transform=None, verbose=False): :return: """ if VERBOSE or verbose: - print - print 'Running', section, 'tests' + print('\nRunning', section, 'tests') for cur_test_base in SIMPLE_TEST_CASES[section]: if transform: cur_test = transform(cur_test_base) @@ -468,7 +467,7 @@ def _test_names(self, name_parser, section, transform=None, verbose=False): else: cur_test = cur_test_base if VERBOSE or verbose: - print 'Testing', cur_test + print('Testing', 
cur_test) result = SIMPLE_TEST_CASES[section][cur_test_base] @@ -482,10 +481,10 @@ def _test_names(self, name_parser, section, transform=None, verbose=False): test_result = name_parser.parse(cur_test) if DEBUG or verbose: - print 'air_by_date:', test_result.is_air_by_date, 'air_date:', test_result.air_date - print 'anime:', test_result.is_anime, 'ab_episode_numbers:', test_result.ab_episode_numbers - print test_result - print result + print('air_by_date:', test_result.is_air_by_date, 'air_date:', test_result.air_date) + print('anime:', test_result.is_anime, 'ab_episode_numbers:', test_result.ab_episode_numbers) + print(test_result) + print(result) self.assertEqual(test_result.which_regex, [section], '%s : %s != %s' % (cur_test, test_result.which_regex, [section])) self.assertEqual(str(test_result), str(result), '%s : %s != %s' % (cur_test, str(test_result), str(result))) diff --git a/tests/notifier_tests.py b/tests/notifier_tests.py index 5818c0097d..cf5b1433de 100644 --- a/tests/notifier_tests.py +++ b/tests/notifier_tests.py @@ -29,6 +29,7 @@ Test notifiers """ +from __future__ import print_function import os.path import sys @@ -290,7 +291,7 @@ def _debug_spew(text): :return: """ if __name__ == '__main__' and text is not None: - print text + print(text) def _get_showid_by_showname(self, showname): """ @@ -306,10 +307,12 @@ def _get_showid_by_showname(self, showname): return -1 if __name__ == '__main__': - print "==================" - print "STARTING - NOTIFIER TESTS" - print "==================" - print "######################################################################" + print(""" + ================== + STARTING - NOTIFIER TESTS + ================== + ###################################################################### + """) SUITE = unittest.TestLoader().loadTestsFromTestCase(NotifierTests) unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/numdict_tests.py b/tests/numdict_tests.py index 7156439fc8..4512a5e48f 100644 --- a/tests/numdict_tests.py +++ b/tests/numdict_tests.py @@ -6,6 +6,8 @@ # pylint: disable=line-too-long +from __future__ import print_function + import os.path import sys import unittest diff --git a/tests/pp_tests.py b/tests/pp_tests.py index f5cadaca05..1226a5704c 100644 --- a/tests/pp_tests.py +++ b/tests/pp_tests.py @@ -17,6 +17,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . 
+from __future__ import print_function + """ Test post processing """ @@ -84,15 +86,17 @@ def test_process(self): if __name__ == '__main__': - print "==================" - print "STARTING - PostProcessor TESTS" - print "==================" - print "######################################################################" + print(""" + ================== + STARTING - PostProcessor TESTS + ================== + ###################################################################### + """) SUITE = unittest.TestLoader().loadTestsFromTestCase(PPInitTests) unittest.TextTestRunner(verbosity=2).run(SUITE) - print "######################################################################" + print("######################################################################") SUITE = unittest.TestLoader().loadTestsFromTestCase(PPBasicTests) unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/search_tests.py b/tests/search_tests.py index 104388d8fb..abe744c5c8 100644 --- a/tests/search_tests.py +++ b/tests/search_tests.py @@ -20,6 +20,8 @@ # pylint: disable=line-too-long +from __future__ import print_function + """ Test searches """ @@ -92,8 +94,8 @@ def do_test(self): cur_string = '' for cur_string in season_strings, episode_strings: if not all([isinstance(cur_string, list), isinstance(cur_string[0], dict)]): - print " %s is using a wrong string format!" % cur_provider.name - print cur_string + print(" %s is using a wrong string format!" % cur_provider.name) + print(cur_string) fail = True continue @@ -104,8 +106,8 @@ def do_test(self): assert season_strings == cur_data["s_strings"] assert episode_strings == cur_data["e_strings"] except AssertionError: - print " %s is using a wrong string format!" % cur_provider.name - print cur_string + print (" %s is using a wrong string format!" % cur_provider.name) + print (cur_string) continue search_strings = episode_strings[0] @@ -119,33 +121,35 @@ def do_test(self): items = cur_provider.search(search_strings) # pylint: disable=protected-access if not items: - print "No results from cur_provider?" + print("No results from cur_provider?") continue title, url = cur_provider._get_title_and_url(items[0]) # pylint: disable=protected-access for word in show.name.split(" "): if not word.lower() in title.lower(): - print "Show cur_name not in title: %s. URL: %s" % (title, url) + print("Show cur_name not in title: %s. 
URL: %s" % (title, url)) continue if not url: - print "url is empty" + print("url is empty") continue quality = cur_provider.get_quality(items[0]) size = cur_provider._get_size(items[0]) # pylint: disable=protected-access if not show.quality & quality: - print "Quality not in common.ANY, %r %s" % (quality, size) + print("Quality not in common.ANY, %r %s" % (quality, size)) continue return do_test if __name__ == '__main__': - print "==================" - print "STARTING - Search TESTS" - print "==================" - print "######################################################################" + print(""" + ================== + STARTING - Search TESTS + ================== + ###################################################################### + """) # create the test methods for forceSearch in (True, False): for name, data in TESTS.items(): diff --git a/tests/sickrage_tests/helper/__init__.py b/tests/sickrage_tests/helper/__init__.py index 21e2b67f8f..e97a008f3a 100644 --- a/tests/sickrage_tests/helper/__init__.py +++ b/tests/sickrage_tests/helper/__init__.py @@ -3,6 +3,8 @@ Tests for SickRage helpers """ +from __future__ import print_function + import unittest from common_tests import CommonTests diff --git a/tests/sickrage_tests/media/__init__.py b/tests/sickrage_tests/media/__init__.py index 914787236e..8db1f8a3be 100644 --- a/tests/sickrage_tests/media/__init__.py +++ b/tests/sickrage_tests/media/__init__.py @@ -3,6 +3,8 @@ Tests for SickRage media """ +from __future__ import print_function + import unittest from generic_media_tests import GenericMediaTests diff --git a/tests/sickrage_tests/media/show_banner_tests.py b/tests/sickrage_tests/media/show_banner_tests.py index 01af8e761b..7474b94a7f 100644 --- a/tests/sickrage_tests/media/show_banner_tests.py +++ b/tests/sickrage_tests/media/show_banner_tests.py @@ -21,6 +21,8 @@ Test ShowBanner """ +from __future__ import print_function + from generic_media_tests import GenericMediaTests import os diff --git a/tests/sickrage_tests/media/show_fan_art_tests.py b/tests/sickrage_tests/media/show_fan_art_tests.py index 8ca99b66f2..49a36559ba 100644 --- a/tests/sickrage_tests/media/show_fan_art_tests.py +++ b/tests/sickrage_tests/media/show_fan_art_tests.py @@ -21,6 +21,8 @@ Test ShowFanArt """ +from __future__ import print_function + from generic_media_tests import GenericMediaTests import os diff --git a/tests/sickrage_tests/media/show_network_logo_tests.py b/tests/sickrage_tests/media/show_network_logo_tests.py index ab783d4100..72ad7bd465 100644 --- a/tests/sickrage_tests/media/show_network_logo_tests.py +++ b/tests/sickrage_tests/media/show_network_logo_tests.py @@ -21,6 +21,8 @@ Test ShowNetworkLogo """ +from __future__ import print_function + from generic_media_tests import GenericMediaTests import os diff --git a/tests/sickrage_tests/media/show_poster_tests.py b/tests/sickrage_tests/media/show_poster_tests.py index 5980cdf0ab..29faf21adc 100644 --- a/tests/sickrage_tests/media/show_poster_tests.py +++ b/tests/sickrage_tests/media/show_poster_tests.py @@ -21,6 +21,8 @@ Test ShowPoster """ +from __future__ import print_function + from generic_media_tests import GenericMediaTests import os diff --git a/tests/sickrage_tests/providers/__init__.py b/tests/sickrage_tests/providers/__init__.py index e6567476f7..b0ff0ab5da 100644 --- a/tests/sickrage_tests/providers/__init__.py +++ b/tests/sickrage_tests/providers/__init__.py @@ -3,6 +3,8 @@ Tests for SickRage providers """ +from __future__ import print_function + import unittest from 
generic_provider_tests import GenericProviderTests diff --git a/tests/sickrage_tests/show/__init__.py b/tests/sickrage_tests/show/__init__.py index 906cca08b7..34bef9a608 100644 --- a/tests/sickrage_tests/show/__init__.py +++ b/tests/sickrage_tests/show/__init__.py @@ -3,6 +3,8 @@ Tests for SickRage show """ +from __future__ import print_function + import unittest from coming_episodes_tests import ComingEpisodesTests diff --git a/tests/sickrage_tests/system/__init__.py b/tests/sickrage_tests/system/__init__.py index 816646d0bb..f90a823a69 100644 --- a/tests/sickrage_tests/system/__init__.py +++ b/tests/sickrage_tests/system/__init__.py @@ -3,6 +3,8 @@ Tests for SickRage system """ +from __future__ import print_function + import unittest from restart_tests import RestartTests diff --git a/tests/snatch_tests.py b/tests/snatch_tests.py index 7594eddca9..6d476e932b 100644 --- a/tests/snatch_tests.py +++ b/tests/snatch_tests.py @@ -23,6 +23,7 @@ Test snatching """ +from __future__ import print_function import os.path import sys import unittest @@ -136,10 +137,12 @@ def do_test(): return do_test if __name__ == '__main__': - print "==================" - print "STARTING - Snatch TESTS" - print "==================" - print "######################################################################" + print(""" + ================== + STARTING - Snatch TESTS + ================== + ###################################################################### + """) # create the test methods cur_tvdb_id = 1 for forceSearch in (True, False): diff --git a/tests/ssl_sni_tests.py b/tests/ssl_sni_tests.py index 3050143012..8df8c24103 100644 --- a/tests/ssl_sni_tests.py +++ b/tests/ssl_sni_tests.py @@ -19,6 +19,8 @@ # pylint: disable=line-too-long +from __future__ import print_function + """ Test SNI and SSL """ @@ -50,19 +52,19 @@ def _connectivity_test(): :return: test to run """ if not _provider.url: - print '%s has no url set, skipping' % _provider.name + print('%s has no url set, skipping' % _provider.name) return try: requests.head(_provider.url, verify=certifi.old_where(), timeout=10) except requests.exceptions.SSLError as error: if 'certificate verify failed' in str(error): - print 'Cannot verify certificate for %s' % _provider.name + print('Cannot verify certificate for %s' % _provider.name) else: - print 'SSLError on %s: %s' % (_provider.name, ex(error.message)) + print('SSLError on %s: %s' % (_provider.name, ex(error.message))) raise except requests.exceptions.Timeout: - print 'Provider timed out' + print('Provider timed out') return _connectivity_test @@ -71,10 +73,12 @@ class SniTests(unittest.TestCase): pass if __name__ == "__main__": - print "==================" - print "STARTING - Provider Connectivity TESTS and SSL/SNI" - print "==================" - print "######################################################################" + print(""" + ================== + STARTING - Provider Connectivity TESTS and SSL/SNI + ================== + ###################################################################### + """) # Just checking all providers - we should make this error on non-existent urls. 
for provider in [p for p in providers.makeProviderList()]: test_name = 'test_%s' % provider.name diff --git a/tests/test_lib.py b/tests/test_lib.py index b9bf029d69..fec6eeabad 100644 --- a/tests/test_lib.py +++ b/tests/test_lib.py @@ -39,6 +39,8 @@ TestCacheDBConnection """ +from __future__ import print_function + import os.path import shutil import sys @@ -276,8 +278,8 @@ def teardown_test_db(): # try: # os.remove(file_name) # except Exception as e: - # print 'ERROR: Failed to remove ' + file_name - # print exception(e) + # print('ERROR: Failed to remove ' + file_name) + # print(exception(e)) def setup_test_episode_file(): @@ -294,7 +296,7 @@ def setup_test_episode_file(): # pylint: disable=broad-except # Catching too general exception except Exception: - print "Unable to set up test episode" + print("Unable to set up test episode") raise diff --git a/tests/torrent_tests.py b/tests/torrent_tests.py index 9d5fbda5ab..c6e390a456 100644 --- a/tests/torrent_tests.py +++ b/tests/torrent_tests.py @@ -17,6 +17,8 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import print_function + """ Test torrents """ @@ -94,7 +96,7 @@ def test_search(): # pylint: disable=too-many-locals # Continue only if one Release is found if len(torrent_rows) < 2: - print "The data returned does not contain any torrents" + print("The data returned does not contain any torrents") return for row in torrent_rows[1:]: @@ -112,12 +114,14 @@ def test_search(): # pylint: disable=too-many-locals except (AttributeError, TypeError): continue - print title + print(title) if __name__ == "__main__": - print "==================" - print "STARTING - Torrent Basic TESTS" - print "==================" - print "######################################################################" + print(""" + ================== + STARTING - Torrent Basic TESTS + ================== + ###################################################################### + """) SUITE = unittest.TestLoader().loadTestsFromTestCase(TorrentBasicTests) unittest.TextTestRunner(verbosity=2).run(SUITE) diff --git a/tests/tv_tests.py b/tests/tv_tests.py index c76cb96af4..03dc981207 100644 --- a/tests/tv_tests.py +++ b/tests/tv_tests.py @@ -21,6 +21,8 @@ Test tv """ +from __future__ import print_function + import os.path import sys import unittest @@ -139,15 +141,17 @@ def test_get_episode(): if __name__ == '__main__': - print "==================" - print "STARTING - TV TESTS" - print "==================" - print "######################################################################" + print(""" + ================== + STARTING - TV TESTS + ================== + ###################################################################### + """) SUITE = unittest.TestLoader().loadTestsFromTestCase(TVShowTests) unittest.TextTestRunner(verbosity=2).run(SUITE) - print "######################################################################" + print("######################################################################") SUITE = unittest.TestLoader().loadTestsFromTestCase(TVEpisodeTests) unittest.TextTestRunner(verbosity=2).run(SUITE) - print "######################################################################" + print("######################################################################") SUITE = unittest.TestLoader().loadTestsFromTestCase(TVTests) unittest.TextTestRunner(verbosity=2).run(SUITE) From 75d5c9a6985a08ba5d94b2a32e0abf888a6b143c Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 25 Jun 2016 07:45:42 
-0400 Subject: [PATCH 109/134] PY3 requires '0o' prefix for octal literals --- sickbeard/naming.py | 2 +- tests/helpers_tests.py | 10 ++++++---- tests/search_tests.py | 2 +- tests/torrent_tests.py | 2 +- tests/tv_tests.py | 16 ++++++++-------- 5 files changed, 17 insertions(+), 15 deletions(-) diff --git a/sickbeard/naming.py b/sickbeard/naming.py index 89f529d321..82b691a0c1 100644 --- a/sickbeard/naming.py +++ b/sickbeard/naming.py @@ -57,7 +57,7 @@ class TVShow(object): # pylint: disable=too-few-public-methods def __init__(self): self.name = "Show Name" self.genre = "Comedy" - self.indexerid = 00001 + self.indexerid = 1 self.air_by_date = 0 self.sports = 0 self.anime = 0 diff --git a/tests/helpers_tests.py b/tests/helpers_tests.py index f82f953d3f..38e153e57b 100644 --- a/tests/helpers_tests.py +++ b/tests/helpers_tests.py @@ -677,10 +677,12 @@ def test_pretty_time_delta(self): pass if __name__ == '__main__': - print("==================") - print("STARTING - Helpers TESTS") - print("==================") - print("######################################################################") + print(""" + ================== + STARTING - Helpers TESTS + ================== + ###################################################################### + """) for name, test_data in TEST_CASES.items(): test_name = 'test_%s' % name test = generator(test_data) diff --git a/tests/search_tests.py b/tests/search_tests.py index abe744c5c8..bd4d55609d 100644 --- a/tests/search_tests.py +++ b/tests/search_tests.py @@ -114,7 +114,7 @@ def do_test(self): # search_strings.update(season_strings[0]) # search_strings.update({"RSS":['']}) - # print search_strings + # print(search_strings) if not cur_provider.public: continue diff --git a/tests/torrent_tests.py b/tests/torrent_tests.py index c6e390a456..2e060578bf 100644 --- a/tests/torrent_tests.py +++ b/tests/torrent_tests.py @@ -51,7 +51,7 @@ def setUpClass(cls): show = TVShow(1, 121361) show.name = "Italian Works" show.episodes = [] - episode = TVEpisode(show, 05, 10) + episode = TVEpisode(show, 5, 10) episode.name = "Pines of Rome" episode.scene_season = 5 episode.scene_episode = 10 diff --git a/tests/tv_tests.py b/tests/tv_tests.py index 03dc981207..db6140da98 100644 --- a/tests/tv_tests.py +++ b/tests/tv_tests.py @@ -50,14 +50,14 @@ def test_init_indexerid(self): """ test init indexer id """ - show = TVShow(1, 0001, "en") - self.assertEqual(show.indexerid, 0001) + show = TVShow(1, 1, "en") + self.assertEqual(show.indexerid, 1) def test_change_indexerid(self): """ test change indexer id """ - show = TVShow(1, 0001, "en") + show = TVShow(1, 1, "en") show.name = "show name" show.network = "cbs" show.genre = "crime" @@ -70,17 +70,17 @@ def test_change_indexerid(self): show.saveToDB() show.loadFromDB() - show.indexerid = 0002 + show.indexerid = 2 show.saveToDB() show.loadFromDB() - self.assertEqual(show.indexerid, 0002) + self.assertEqual(show.indexerid, 2) def test_set_name(self): """ test set name """ - show = TVShow(1, 0001, "en") + show = TVShow(1, 1, "en") show.name = "newName" show.saveToDB() show.loadFromDB() @@ -102,7 +102,7 @@ def test_init_empty_db(self): """ test init empty db """ - show = TVShow(1, 0001, "en") + show = TVShow(1, 1, "en") episode = TVEpisode(show, 1, 1) episode.name = "asdasdasdajkaj" episode.saveToDB() @@ -126,7 +126,7 @@ def test_get_episode(): """ Test get episodes """ - show = TVShow(1, 0001, "en") + show = TVShow(1, 1, "en") show.name = "show name" show.network = "cbs" show.genre = "crime" From f60460df5524ecdd8461819d862c82dfa93db9ac 
Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 25 Jun 2016 08:28:51 -0400 Subject: [PATCH 110/134] PY3 has no long type --- sickrage/helper/common.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sickrage/helper/common.py b/sickrage/helper/common.py index 4361d24d10..aba7f8b972 100644 --- a/sickrage/helper/common.py +++ b/sickrage/helper/common.py @@ -24,12 +24,15 @@ import logging import traceback -from six import text_type +from six import text_type, PY3 import sickbeard logger = logging.getLogger(__name__) +if PY3: + long = int + dateFormat = '%Y-%m-%d' dateTimeFormat = '%Y-%m-%d %H:%M:%S' # Mapping HTTP status codes to official W3C names From b9dff1eee723391307cbf23bfa80c43b67fa649f Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 25 Jun 2016 21:38:15 -0400 Subject: [PATCH 111/134] PY3 does not support a bare raise --- sickbeard/config.py | 6 +++--- sickbeard/providers/newznab.py | 2 +- sickbeard/rssfeeds.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sickbeard/config.py b/sickbeard/config.py index 9771d6f4c0..e62fc7213e 100644 --- a/sickbeard/config.py +++ b/sickbeard/config.py @@ -536,7 +536,7 @@ def check_setting_int(config, cfg_name, item_name, def_val, silent=True): my_val = int(my_val) if str(my_val) == str(None): - raise + raise Exception except Exception: my_val = def_val try: @@ -558,7 +558,7 @@ def check_setting_float(config, cfg_name, item_name, def_val, silent=True): try: my_val = float(config[cfg_name][item_name]) if str(my_val) == str(None): - raise + raise Exception except Exception: my_val = def_val try: @@ -591,7 +591,7 @@ def check_setting_str(config, cfg_name, item_name, def_val, silent=True, censor_ try: my_val = helpers.decrypt(config[cfg_name][item_name], encryption_version) if str(my_val) == str(None): - raise + raise Exception except Exception: my_val = def_val try: diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 64397e4c43..2734154745 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -235,7 +235,7 @@ def _check_auth_from_data(self, data): try: err_desc = data.error.attrs['description'] if not err_desc: - raise + raise Exception except (AttributeError, TypeError): return self._check_auth() diff --git a/sickbeard/rssfeeds.py b/sickbeard/rssfeeds.py index 3fa4bbb590..c0822bcdb9 100644 --- a/sickbeard/rssfeeds.py +++ b/sickbeard/rssfeeds.py @@ -10,7 +10,7 @@ def getFeed(url, params=None, request_hook=None): try: data = request_hook(url, params=params, returns='text', timeout=30) if not data: - raise + raise Exception feed = parse(data, response_headers={'content-type': 'application/xml'}) if feed: From 20903813a09eaab3dbf89e097ea38433163bb8cd Mon Sep 17 00:00:00 2001 From: Labrys Date: Sat, 25 Jun 2016 21:45:00 -0400 Subject: [PATCH 112/134] PY3 has no file method --- sickbeard/helpers.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index 44918461dd..e5ec5b96a9 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -1226,10 +1226,9 @@ def extractZip(archive, targetDir): # copy file (taken from zipfile's extract) source = zip_file.open(member) - target = file(ek(os.path.join, targetDir, filename), "wb") - shutil.copyfileobj(source, target) - source.close() - target.close() + with open(ek(os.path.join, targetDir, filename), "wb") as target: + shutil.copyfileobj(source, target) + source.close() zip_file.close() return True except Exception as e: From 
8fc48e4d9b17dc63cb8edb463cee0fa7e55fc395 Mon Sep 17 00:00:00 2001 From: labrys Date: Tue, 21 Jun 2016 04:37:13 -0400 Subject: [PATCH 113/134] Fix consecutive if-statements --- sickbeard/name_parser/parser.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py index 61a82d3123..e1e0f6359e 100644 --- a/sickbeard/name_parser/parser.py +++ b/sickbeard/name_parser/parser.py @@ -364,10 +364,10 @@ def _combine_results(first, second, attr): @staticmethod def _unicodify(obj, encoding="utf-8"): - if isinstance(obj, string_types): - if not isinstance(obj, text_type): - obj = text_type(obj, encoding, 'replace') - return obj + if isinstance(obj, string_types) and not isinstance(obj, text_type): + return text_type(obj, encoding, 'replace') + else: + return obj @staticmethod def _convert_number(org_number): From cfaca801e8a1ec4d3f850ff9188952889f1d1c85 Mon Sep 17 00:00:00 2001 From: labrys Date: Tue, 21 Jun 2016 07:32:30 -0400 Subject: [PATCH 114/134] Rewrite result categorization --- sickrage/providers/GenericProvider.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/sickrage/providers/GenericProvider.py b/sickrage/providers/GenericProvider.py index de3f723338..bfe7439f66 100644 --- a/sickrage/providers/GenericProvider.py +++ b/sickrage/providers/GenericProvider.py @@ -21,6 +21,7 @@ import re import sickbeard +from collections import defaultdict from base64 import b16encode, b32decode from datetime import datetime from itertools import chain @@ -160,21 +161,22 @@ def find_search_results(self, show, episodes, search_mode, forced_search=False, return results if items_list: - items = {} - unknown_items = [] - + # categorize the items into lists by quality + items = defaultdict(list) for item in items_list: - quality = self.get_quality(item, anime=show.is_anime) + items[self.get_quality(item, anime=show.is_anime)].append(item) - if quality == Quality.UNKNOWN: - unknown_items.append(item) - else: - if quality not in items: - items[quality] = [] - items[quality].append(item) + # temporarily remove the list of items with unknown quality + unknown_items = items.pop(Quality.UNKNOWN, []) + + # make a generator to sort the remaining items by descending quality + items_list = (items[quality] for quality in sorted(items, reverse=True)) + + # unpack all of the quality lists into a single sorted list + items_list = list(chain(*items_list)) - items_list = list(chain(*[v for (_, v) in sorted(items.iteritems(), reverse=True)])) - items_list += unknown_items + # extend the list with the unknown qualities, now sorted at the bottom of the list + items_list.extend(unknown_items) cl = [] From 29550658bb7509b2f164128f492933a00af0a24c Mon Sep 17 00:00:00 2001 From: Labrys Date: Mon, 27 Jun 2016 14:42:23 -0400 Subject: [PATCH 115/134] Fix checkVersion getURL to return response and not text --- sickbeard/versionChecker.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sickbeard/versionChecker.py b/sickbeard/versionChecker.py index 80a8f65df0..6af0cdbb41 100644 --- a/sickbeard/versionChecker.py +++ b/sickbeard/versionChecker.py @@ -220,11 +220,11 @@ def getDBcompare(self): check_url = 'http://cdn.rawgit.com/{org}/{repo}/{commit}/sickbeard/databases/main_db.py'.format( org=sickbeard.GIT_ORG, repo=sickbeard.GIT_REPO, commit=cur_hash) - response = helpers.getURL(check_url, session=self.session) + response = helpers.getURL(check_url, session=self.session, returns='response') if 
response.status_code == 404: check_url.replace('main_db.py', 'mainDB.py') - response = helpers.getURL(check_url, session=self.session) + response = helpers.getURL(check_url, session=self.session, returns='response') match = re.search(r'MAX_DB_VERSION\s=\s(?P\d{2,3})', response.text) new_branch_db_version = int(match.group('version')) From 8fb13faf9b63c1875e895d6867e53a69af6b6a35 Mon Sep 17 00:00:00 2001 From: Labrys of Knossos Date: Tue, 28 Jun 2016 08:12:48 -0400 Subject: [PATCH 116/134] Ui cleanup (#689) * Set base href to sickbeard.WEB_ROOT * Use rgb function for color * Use `rgb()` instead of `#HEX` for consistency with `rgba()` * Insert missing semicolons * Remove redundant px unit * Add generic default font-family * Remove duplicate properties * Replace trinary statements with if * Fix bug in value * Clean up logic * Fix broken tags * Remove excess whitespace * Make
      self-closing * Fix deprecated tags * Clean up snatchSelection * * Clean up main.mako, header, footer, and partials * Make memory usage cross-platform with optional dependency `psutil` * Remove invalid attributes * Replace tabs with spaces --- gui/slick/css/browser.css | 2 +- gui/slick/css/country-flags.css | 755 ++-- gui/slick/css/dark.css | 427 ++- gui/slick/css/light.css | 94 +- gui/slick/css/style.css | 1013 ++--- gui/slick/js/addShowOptions.js | 6 +- gui/slick/js/qualityChooser.js | 4 +- gui/slick/views/500.mako | 9 +- gui/slick/views/IRC.mako | 2 +- gui/slick/views/addShows.mako | 21 +- gui/slick/views/addShows_addExistingShow.mako | 21 +- gui/slick/views/addShows_newShow.mako | 41 +- gui/slick/views/addShows_popularShows.mako | 56 +- gui/slick/views/addShows_trendingShows.mako | 20 +- gui/slick/views/apiBuilder.mako | 102 +- gui/slick/views/config.mako | 6 +- gui/slick/views/config_anime.mako | 165 +- gui/slick/views/config_backuprestore.mako | 28 +- gui/slick/views/config_general.mako | 226 +- gui/slick/views/config_notifications.mako | 3312 ++++++++--------- gui/slick/views/config_postProcessing.mako | 1837 +++++---- gui/slick/views/config_providers.mako | 184 +- gui/slick/views/config_search.mako | 149 +- gui/slick/views/config_subtitles.mako | 74 +- gui/slick/views/displayShow.mako | 134 +- gui/slick/views/editShow.mako | 70 +- gui/slick/views/history.mako | 57 +- gui/slick/views/home.mako | 27 +- gui/slick/views/home_massAddTable.mako | 12 +- gui/slick/views/home_postprocess.mako | 6 +- gui/slick/views/inc_addShowOptions.mako | 25 +- gui/slick/views/inc_blackwhitelist.mako | 15 +- gui/slick/views/inc_defs.mako | 5 - gui/slick/views/inc_qualityChooser.mako | 16 +- gui/slick/views/inc_rootDirs.mako | 5 +- gui/slick/views/layouts/main.mako | 224 +- gui/slick/views/manage.mako | 39 +- gui/slick/views/manage_backlogOverview.mako | 20 +- gui/slick/views/manage_episodeStatuses.mako | 33 +- gui/slick/views/manage_failedDownloads.mako | 16 +- gui/slick/views/manage_manageSearches.mako | 55 +- gui/slick/views/manage_massEdit.mako | 117 +- gui/slick/views/manage_subtitleMissed.mako | 21 +- gui/slick/views/manage_torrents.mako | 5 +- gui/slick/views/partials/alerts.mako | 15 + gui/slick/views/partials/footer.mako | 87 +- gui/slick/views/partials/header.mako | 112 +- gui/slick/views/partials/home/banner.mako | 30 +- gui/slick/views/partials/home/poster.mako | 30 +- gui/slick/views/partials/home/simple.mako | 24 +- gui/slick/views/partials/home/small.mako | 32 +- gui/slick/views/partials/submenu.mako | 26 + gui/slick/views/restart.mako | 17 +- gui/slick/views/schedule.mako | 138 +- gui/slick/views/snatchSelection.mako | 816 ++-- gui/slick/views/status.mako | 5 +- gui/slick/views/testRename.mako | 16 +- gui/slick/views/trendingShows.mako | 14 +- gui/slick/views/viewlogs.mako | 92 +- setup.py | 3 + sickbeard/server/web/home/add_shows.py | 2 +- sickbeard/show_name_helpers.py | 6 +- sickrage/helper/common.py | 4 +- 63 files changed, 5333 insertions(+), 5592 deletions(-) create mode 100644 gui/slick/views/partials/alerts.mako create mode 100644 gui/slick/views/partials/submenu.mako diff --git a/gui/slick/css/browser.css b/gui/slick/css/browser.css index bdc1f3ab3d..56cd697714 100644 --- a/gui/slick/css/browser.css +++ b/gui/slick/css/browser.css @@ -20,7 +20,7 @@ } .fileBrowserDialog ul li a:hover { - color: #00f; + color: rgb(0, 0, 255); background: none; } diff --git a/gui/slick/css/country-flags.css b/gui/slick/css/country-flags.css index 44ca6e1976..1f9a424964 100644 --- 
a/gui/slick/css/country-flags.css +++ b/gui/slick/css/country-flags.css @@ -1,257 +1,508 @@ .country-flag { width: 16px; height: 11px; - background:url(../images/country-flags.png) no-repeat + background:url(../images/country-flags.png) no-repeat; } -.country-flag.flag-ad {background-position: -16px 0} -.country-flag.flag-ae {background-position: -32px 0} -.country-flag.flag-af {background-position: -48px 0} -.country-flag.flag-ag {background-position: -64px 0} -.country-flag.flag-ai {background-position: -80px 0} -.country-flag.flag-al {background-position: -96px 0} -.country-flag.flag-am {background-position: -112px 0} -.country-flag.flag-an {background-position: -128px 0} -.country-flag.flag-ao {background-position: -144px 0} -.country-flag.flag-ar {background-position: -160px 0} -.country-flag.flag-as {background-position: -176px 0} -.country-flag.flag-at {background-position: -192px 0} -.country-flag.flag-au {background-position: -208px 0} -.country-flag.flag-aw {background-position: -224px 0} -.country-flag.flag-az {background-position: -240px 0} -.country-flag.flag-ba {background-position: 0 -11px} -.country-flag.flag-bb {background-position: -16px -11px} -.country-flag.flag-bd {background-position: -32px -11px} -.country-flag.flag-be {background-position: -48px -11px} -.country-flag.flag-bf {background-position: -64px -11px} -.country-flag.flag-bg {background-position: -80px -11px} -.country-flag.flag-bh {background-position: -96px -11px} -.country-flag.flag-bi {background-position: -112px -11px} -.country-flag.flag-bj {background-position: -128px -11px} -.country-flag.flag-bm {background-position: -144px -11px} -.country-flag.flag-bn {background-position: -160px -11px} -.country-flag.flag-bo {background-position: -176px -11px} -.country-flag.flag-br {background-position: -192px -11px} -.country-flag.flag-bs {background-position: -208px -11px} -.country-flag.flag-bt {background-position: -224px -11px} -.country-flag.flag-bv {background-position: -240px -11px} -.country-flag.flag-bw {background-position: 0 -22px} -.country-flag.flag-by {background-position: -16px -22px} -.country-flag.flag-bz {background-position: -32px -22px} -.country-flag.flag-ca {background-position: -48px -22px} -.country-flag.flag-catalonia {background-position: -64px -22px} -.country-flag.flag-cd {background-position: -80px -22px} -.country-flag.flag-cf {background-position: -96px -22px} -.country-flag.flag-cg {background-position: -112px -22px} -.country-flag.flag-ch {background-position: -128px -22px} -.country-flag.flag-ci {background-position: -144px -22px} -.country-flag.flag-ck {background-position: -160px -22px} -.country-flag.flag-cl {background-position: -176px -22px} -.country-flag.flag-cm {background-position: -192px -22px} -.country-flag.flag-cn {background-position: -208px -22px} -.country-flag.flag-co {background-position: -224px -22px} -.country-flag.flag-cr {background-position: -240px -22px} -.country-flag.flag-cu {background-position: 0 -33px} -.country-flag.flag-cv {background-position: -16px -33px} -.country-flag.flag-cw {background-position: -32px -33px} -.country-flag.flag-cy {background-position: -48px -33px} -.country-flag.flag-cz {background-position: -64px -33px} -.country-flag.flag-de {background-position: -80px -33px} -.country-flag.flag-dj {background-position: -96px -33px} -.country-flag.flag-dk {background-position: -112px -33px} -.country-flag.flag-dm {background-position: -128px -33px} -.country-flag.flag-do {background-position: -144px -33px} -.country-flag.flag-dz 
{background-position: -160px -33px} -.country-flag.flag-ec {background-position: -176px -33px} -.country-flag.flag-ee {background-position: -192px -33px} -.country-flag.flag-eg {background-position: -208px -33px} -.country-flag.flag-eh {background-position: -224px -33px} -.country-flag.flag-england {background-position: -240px -33px} -.country-flag.flag-er {background-position: 0 -44px} -.country-flag.flag-es {background-position: -16px -44px} -.country-flag.flag-et {background-position: -32px -44px} -.country-flag.flag-eu {background-position: -48px -44px} -.country-flag.flag-fi {background-position: -64px -44px} -.country-flag.flag-fj {background-position: -80px -44px} -.country-flag.flag-fk {background-position: -96px -44px} -.country-flag.flag-fm {background-position: -112px -44px} -.country-flag.flag-fo {background-position: -128px -44px} -.country-flag.flag-fr {background-position: -144px -44px} -.country-flag.flag-ga {background-position: -160px -44px} -.country-flag.flag-gb {background-position: -176px -44px} -.country-flag.flag-gd {background-position: -192px -44px} -.country-flag.flag-ge {background-position: -208px -44px} -.country-flag.flag-gf {background-position: -224px -44px} -.country-flag.flag-gg {background-position: -240px -44px} -.country-flag.flag-gh {background-position: 0 -55px} -.country-flag.flag-gi {background-position: -16px -55px} -.country-flag.flag-gl {background-position: -32px -55px} -.country-flag.flag-gm {background-position: -48px -55px} -.country-flag.flag-gn {background-position: -64px -55px} -.country-flag.flag-gp {background-position: -80px -55px} -.country-flag.flag-gq {background-position: -96px -55px} -.country-flag.flag-gr {background-position: -112px -55px} -.country-flag.flag-gs {background-position: -128px -55px} -.country-flag.flag-gt {background-position: -144px -55px} -.country-flag.flag-gu {background-position: -160px -55px} -.country-flag.flag-gw {background-position: -176px -55px} -.country-flag.flag-gy {background-position: -192px -55px} -.country-flag.flag-hk {background-position: -208px -55px} -.country-flag.flag-hm {background-position: -224px -55px} -.country-flag.flag-hn {background-position: -240px -55px} -.country-flag.flag-hr {background-position: 0 -66px} -.country-flag.flag-ht {background-position: -16px -66px} -.country-flag.flag-hu {background-position: -32px -66px} -.country-flag.flag-ic {background-position: -48px -66px} -.country-flag.flag-id {background-position: -64px -66px} -.country-flag.flag-ie {background-position: -80px -66px} -.country-flag.flag-il {background-position: -96px -66px} -.country-flag.flag-im {background-position: -112px -66px} -.country-flag.flag-in {background-position: -128px -66px} -.country-flag.flag-io {background-position: -144px -66px} -.country-flag.flag-iq {background-position: -160px -66px} -.country-flag.flag-ir {background-position: -176px -66px} -.country-flag.flag-is {background-position: -192px -66px} -.country-flag.flag-it {background-position: -208px -66px} -.country-flag.flag-je {background-position: -224px -66px} -.country-flag.flag-jm {background-position: -240px -66px} -.country-flag.flag-jo {background-position: 0 -77px} -.country-flag.flag-jp {background-position: -16px -77px} -.country-flag.flag-ke {background-position: -32px -77px} -.country-flag.flag-kg {background-position: -48px -77px} -.country-flag.flag-kh {background-position: -64px -77px} -.country-flag.flag-ki {background-position: -80px -77px} -.country-flag.flag-km {background-position: -96px -77px} 
-.country-flag.flag-kn {background-position: -112px -77px} -.country-flag.flag-kp {background-position: -128px -77px} -.country-flag.flag-kr {background-position: -144px -77px} -.country-flag.flag-kurdistan {background-position: -160px -77px} -.country-flag.flag-kw {background-position: -176px -77px} -.country-flag.flag-ky {background-position: -192px -77px} -.country-flag.flag-kz {background-position: -208px -77px} -.country-flag.flag-la {background-position: -224px -77px} -.country-flag.flag-lb {background-position: -240px -77px} -.country-flag.flag-lc {background-position: 0 -88px} -.country-flag.flag-li {background-position: -16px -88px} -.country-flag.flag-lk {background-position: -32px -88px} -.country-flag.flag-lr {background-position: -48px -88px} -.country-flag.flag-ls {background-position: -64px -88px} -.country-flag.flag-lt {background-position: -80px -88px} -.country-flag.flag-lu {background-position: -96px -88px} -.country-flag.flag-lv {background-position: -112px -88px} -.country-flag.flag-ly {background-position: -128px -88px} -.country-flag.flag-ma {background-position: -144px -88px} -.country-flag.flag-mc {background-position: -160px -88px} -.country-flag.flag-md {background-position: -176px -88px} -.country-flag.flag-me {background-position: -192px -88px} -.country-flag.flag-mg {background-position: -208px -88px} -.country-flag.flag-mh {background-position: -224px -88px} -.country-flag.flag-mk {background-position: -240px -88px} -.country-flag.flag-ml {background-position: 0 -99px} -.country-flag.flag-mm {background-position: -16px -99px} -.country-flag.flag-mn {background-position: -32px -99px} -.country-flag.flag-mo {background-position: -48px -99px} -.country-flag.flag-mp {background-position: -64px -99px} -.country-flag.flag-mq {background-position: -80px -99px} -.country-flag.flag-mr {background-position: -96px -99px} -.country-flag.flag-ms {background-position: -112px -99px} -.country-flag.flag-mt {background-position: -128px -99px} -.country-flag.flag-mu {background-position: -144px -99px} -.country-flag.flag-mv {background-position: -160px -99px} -.country-flag.flag-mw {background-position: -176px -99px} -.country-flag.flag-mx {background-position: -192px -99px} -.country-flag.flag-my {background-position: -208px -99px} -.country-flag.flag-mz {background-position: -224px -99px} -.country-flag.flag-na {background-position: -240px -99px} -.country-flag.flag-nc {background-position: 0 -110px} -.country-flag.flag-ne {background-position: -16px -110px} -.country-flag.flag-nf {background-position: -32px -110px} -.country-flag.flag-ng {background-position: -48px -110px} -.country-flag.flag-ni {background-position: -64px -110px} -.country-flag.flag-nl {background-position: -80px -110px} -.country-flag.flag-no {background-position: -96px -110px} -.country-flag.flag-np {background-position: -112px -110px} -.country-flag.flag-nr {background-position: -128px -110px} -.country-flag.flag-nu {background-position: -144px -110px} -.country-flag.flag-nz {background-position: -160px -110px} -.country-flag.flag-om {background-position: -176px -110px} -.country-flag.flag-pa {background-position: -192px -110px} -.country-flag.flag-pe {background-position: -208px -110px} -.country-flag.flag-pf {background-position: -224px -110px} -.country-flag.flag-pg {background-position: -240px -110px} -.country-flag.flag-ph {background-position: 0 -121px} -.country-flag.flag-pk {background-position: -16px -121px} -.country-flag.flag-pl {background-position: -32px -121px} -.country-flag.flag-pm 
{background-position: -48px -121px} -.country-flag.flag-pn {background-position: -64px -121px} -.country-flag.flag-pr {background-position: -80px -121px} -.country-flag.flag-ps {background-position: -96px -121px} -.country-flag.flag-pt {background-position: -112px -121px} -.country-flag.flag-pw {background-position: -128px -121px} -.country-flag.flag-py {background-position: -144px -121px} -.country-flag.flag-qa {background-position: -160px -121px} -.country-flag.flag-re {background-position: -176px -121px} -.country-flag.flag-ro {background-position: -192px -121px} -.country-flag.flag-rs {background-position: -208px -121px} -.country-flag.flag-ru {background-position: -224px -121px} -.country-flag.flag-rw {background-position: -240px -121px} -.country-flag.flag-sa {background-position: 0 -132px} -.country-flag.flag-sb {background-position: -16px -132px} -.country-flag.flag-sc {background-position: -32px -132px} -.country-flag.flag-scotland {background-position: -48px -132px} -.country-flag.flag-sd {background-position: -64px -132px} -.country-flag.flag-se {background-position: -80px -132px} -.country-flag.flag-sg {background-position: -96px -132px} -.country-flag.flag-sh {background-position: -112px -132px} -.country-flag.flag-si {background-position: -128px -132px} -.country-flag.flag-sk {background-position: -144px -132px} -.country-flag.flag-sl {background-position: -160px -132px} -.country-flag.flag-sm {background-position: -176px -132px} -.country-flag.flag-sn {background-position: -192px -132px} -.country-flag.flag-so {background-position: -208px -132px} -.country-flag.flag-somaliland {background-position: -224px -132px} -.country-flag.flag-sr {background-position: -240px -132px} -.country-flag.flag-ss {background-position: 0 -143px} -.country-flag.flag-st {background-position: -16px -143px} -.country-flag.flag-sv {background-position: -32px -143px} -.country-flag.flag-sx {background-position: -48px -143px} -.country-flag.flag-sy {background-position: -64px -143px} -.country-flag.flag-sz {background-position: -80px -143px} -.country-flag.flag-tc {background-position: -96px -143px} -.country-flag.flag-td {background-position: -112px -143px} -.country-flag.flag-tf {background-position: -128px -143px} -.country-flag.flag-tg {background-position: -144px -143px} -.country-flag.flag-th {background-position: -160px -143px} -.country-flag.flag-tj {background-position: -176px -143px} -.country-flag.flag-tk {background-position: -192px -143px} -.country-flag.flag-tl {background-position: -208px -143px} -.country-flag.flag-tm {background-position: -224px -143px} -.country-flag.flag-tn {background-position: -240px -143px} -.country-flag.flag-to {background-position: 0 -154px} -.country-flag.flag-tr {background-position: -16px -154px} -.country-flag.flag-tt {background-position: -32px -154px} -.country-flag.flag-tv {background-position: -48px -154px} -.country-flag.flag-tw {background-position: -64px -154px} -.country-flag.flag-tz {background-position: -80px -154px} -.country-flag.flag-ua {background-position: -96px -154px} -.country-flag.flag-ug {background-position: -112px -154px} -.country-flag.flag-um {background-position: -128px -154px} -.country-flag.flag-us {background-position: -144px -154px} -.country-flag.flag-uy {background-position: -160px -154px} -.country-flag.flag-uz {background-position: -176px -154px} -.country-flag.flag-va {background-position: -192px -154px} -.country-flag.flag-vc {background-position: -208px -154px} -.country-flag.flag-ve {background-position: -224px -154px} 
-.country-flag.flag-vg {background-position: -240px -154px} -.country-flag.flag-vi {background-position: 0 -165px} -.country-flag.flag-vn {background-position: -16px -165px} -.country-flag.flag-vu {background-position: -32px -165px} -.country-flag.flag-wales {background-position: -48px -165px} -.country-flag.flag-wf {background-position: -64px -165px} -.country-flag.flag-ws {background-position: -80px -165px} -.country-flag.flag-ye {background-position: -96px -165px} -.country-flag.flag-yt {background-position: -112px -165px} -.country-flag.flag-za {background-position: -128px -165px} -.country-flag.flag-zanzibar {background-position: -144px -165px} -.country-flag.flag-zm {background-position: -160px -165px} -.country-flag.flag-zw {background-position: -176px -165px} +.country-flag.flag-ad {background-position: -16px 0; +} +.country-flag.flag-ae {background-position: -32px 0; +} +.country-flag.flag-af {background-position: -48px 0; +} +.country-flag.flag-ag {background-position: -64px 0; +} +.country-flag.flag-ai {background-position: -80px 0; +} +.country-flag.flag-al {background-position: -96px 0; +} +.country-flag.flag-am {background-position: -112px 0; +} +.country-flag.flag-an {background-position: -128px 0; +} +.country-flag.flag-ao {background-position: -144px 0; +} +.country-flag.flag-ar {background-position: -160px 0; +} +.country-flag.flag-as {background-position: -176px 0; +} +.country-flag.flag-at {background-position: -192px 0; +} +.country-flag.flag-au {background-position: -208px 0; +} +.country-flag.flag-aw {background-position: -224px 0; +} +.country-flag.flag-az {background-position: -240px 0; +} +.country-flag.flag-ba {background-position: 0 -11px; +} +.country-flag.flag-bb {background-position: -16px -11px; +} +.country-flag.flag-bd {background-position: -32px -11px; +} +.country-flag.flag-be {background-position: -48px -11px; +} +.country-flag.flag-bf {background-position: -64px -11px; +} +.country-flag.flag-bg {background-position: -80px -11px; +} +.country-flag.flag-bh {background-position: -96px -11px; +} +.country-flag.flag-bi {background-position: -112px -11px; +} +.country-flag.flag-bj {background-position: -128px -11px; +} +.country-flag.flag-bm {background-position: -144px -11px; +} +.country-flag.flag-bn {background-position: -160px -11px; +} +.country-flag.flag-bo {background-position: -176px -11px; +} +.country-flag.flag-br {background-position: -192px -11px; +} +.country-flag.flag-bs {background-position: -208px -11px; +} +.country-flag.flag-bt {background-position: -224px -11px; +} +.country-flag.flag-bv {background-position: -240px -11px; +} +.country-flag.flag-bw {background-position: 0 -22px; +} +.country-flag.flag-by {background-position: -16px -22px; +} +.country-flag.flag-bz {background-position: -32px -22px; +} +.country-flag.flag-ca {background-position: -48px -22px; +} +.country-flag.flag-catalonia {background-position: -64px -22px; +} +.country-flag.flag-cd {background-position: -80px -22px; +} +.country-flag.flag-cf {background-position: -96px -22px; +} +.country-flag.flag-cg {background-position: -112px -22px; +} +.country-flag.flag-ch {background-position: -128px -22px; +} +.country-flag.flag-ci {background-position: -144px -22px; +} +.country-flag.flag-ck {background-position: -160px -22px; +} +.country-flag.flag-cl {background-position: -176px -22px; +} +.country-flag.flag-cm {background-position: -192px -22px; +} +.country-flag.flag-cn {background-position: -208px -22px; +} +.country-flag.flag-co {background-position: -224px -22px; +} 
+.country-flag.flag-cr {background-position: -240px -22px; +} +.country-flag.flag-cu {background-position: 0 -33px; +} +.country-flag.flag-cv {background-position: -16px -33px; +} +.country-flag.flag-cw {background-position: -32px -33px; +} +.country-flag.flag-cy {background-position: -48px -33px; +} +.country-flag.flag-cz {background-position: -64px -33px; +} +.country-flag.flag-de {background-position: -80px -33px; +} +.country-flag.flag-dj {background-position: -96px -33px; +} +.country-flag.flag-dk {background-position: -112px -33px; +} +.country-flag.flag-dm {background-position: -128px -33px; +} +.country-flag.flag-do {background-position: -144px -33px; +} +.country-flag.flag-dz {background-position: -160px -33px; +} +.country-flag.flag-ec {background-position: -176px -33px; +} +.country-flag.flag-ee {background-position: -192px -33px; +} +.country-flag.flag-eg {background-position: -208px -33px; +} +.country-flag.flag-eh {background-position: -224px -33px; +} +.country-flag.flag-england {background-position: -240px -33px; +} +.country-flag.flag-er {background-position: 0 -44px; +} +.country-flag.flag-es {background-position: -16px -44px; +} +.country-flag.flag-et {background-position: -32px -44px; +} +.country-flag.flag-eu {background-position: -48px -44px; +} +.country-flag.flag-fi {background-position: -64px -44px; +} +.country-flag.flag-fj {background-position: -80px -44px; +} +.country-flag.flag-fk {background-position: -96px -44px; +} +.country-flag.flag-fm {background-position: -112px -44px; +} +.country-flag.flag-fo {background-position: -128px -44px; +} +.country-flag.flag-fr {background-position: -144px -44px; +} +.country-flag.flag-ga {background-position: -160px -44px; +} +.country-flag.flag-gb {background-position: -176px -44px; +} +.country-flag.flag-gd {background-position: -192px -44px; +} +.country-flag.flag-ge {background-position: -208px -44px; +} +.country-flag.flag-gf {background-position: -224px -44px; +} +.country-flag.flag-gg {background-position: -240px -44px; +} +.country-flag.flag-gh {background-position: 0 -55px; +} +.country-flag.flag-gi {background-position: -16px -55px; +} +.country-flag.flag-gl {background-position: -32px -55px; +} +.country-flag.flag-gm {background-position: -48px -55px; +} +.country-flag.flag-gn {background-position: -64px -55px; +} +.country-flag.flag-gp {background-position: -80px -55px; +} +.country-flag.flag-gq {background-position: -96px -55px; +} +.country-flag.flag-gr {background-position: -112px -55px; +} +.country-flag.flag-gs {background-position: -128px -55px; +} +.country-flag.flag-gt {background-position: -144px -55px; +} +.country-flag.flag-gu {background-position: -160px -55px; +} +.country-flag.flag-gw {background-position: -176px -55px; +} +.country-flag.flag-gy {background-position: -192px -55px; +} +.country-flag.flag-hk {background-position: -208px -55px; +} +.country-flag.flag-hm {background-position: -224px -55px; +} +.country-flag.flag-hn {background-position: -240px -55px; +} +.country-flag.flag-hr {background-position: 0 -66px; +} +.country-flag.flag-ht {background-position: -16px -66px; +} +.country-flag.flag-hu {background-position: -32px -66px; +} +.country-flag.flag-ic {background-position: -48px -66px; +} +.country-flag.flag-id {background-position: -64px -66px; +} +.country-flag.flag-ie {background-position: -80px -66px; +} +.country-flag.flag-il {background-position: -96px -66px; +} +.country-flag.flag-im {background-position: -112px -66px; +} +.country-flag.flag-in {background-position: -128px -66px; 
+} +.country-flag.flag-io {background-position: -144px -66px; +} +.country-flag.flag-iq {background-position: -160px -66px; +} +.country-flag.flag-ir {background-position: -176px -66px; +} +.country-flag.flag-is {background-position: -192px -66px; +} +.country-flag.flag-it {background-position: -208px -66px; +} +.country-flag.flag-je {background-position: -224px -66px; +} +.country-flag.flag-jm {background-position: -240px -66px; +} +.country-flag.flag-jo {background-position: 0 -77px; +} +.country-flag.flag-jp {background-position: -16px -77px; +} +.country-flag.flag-ke {background-position: -32px -77px; +} +.country-flag.flag-kg {background-position: -48px -77px; +} +.country-flag.flag-kh {background-position: -64px -77px; +} +.country-flag.flag-ki {background-position: -80px -77px; +} +.country-flag.flag-km {background-position: -96px -77px; +} +.country-flag.flag-kn {background-position: -112px -77px; +} +.country-flag.flag-kp {background-position: -128px -77px; +} +.country-flag.flag-kr {background-position: -144px -77px; +} +.country-flag.flag-kurdistan {background-position: -160px -77px; +} +.country-flag.flag-kw {background-position: -176px -77px; +} +.country-flag.flag-ky {background-position: -192px -77px; +} +.country-flag.flag-kz {background-position: -208px -77px; +} +.country-flag.flag-la {background-position: -224px -77px; +} +.country-flag.flag-lb {background-position: -240px -77px; +} +.country-flag.flag-lc {background-position: 0 -88px; +} +.country-flag.flag-li {background-position: -16px -88px; +} +.country-flag.flag-lk {background-position: -32px -88px; +} +.country-flag.flag-lr {background-position: -48px -88px; +} +.country-flag.flag-ls {background-position: -64px -88px; +} +.country-flag.flag-lt {background-position: -80px -88px; +} +.country-flag.flag-lu {background-position: -96px -88px; +} +.country-flag.flag-lv {background-position: -112px -88px; +} +.country-flag.flag-ly {background-position: -128px -88px; +} +.country-flag.flag-ma {background-position: -144px -88px; +} +.country-flag.flag-mc {background-position: -160px -88px; +} +.country-flag.flag-md {background-position: -176px -88px; +} +.country-flag.flag-me {background-position: -192px -88px; +} +.country-flag.flag-mg {background-position: -208px -88px; +} +.country-flag.flag-mh {background-position: -224px -88px; +} +.country-flag.flag-mk {background-position: -240px -88px; +} +.country-flag.flag-ml {background-position: 0 -99px; +} +.country-flag.flag-mm {background-position: -16px -99px; +} +.country-flag.flag-mn {background-position: -32px -99px; +} +.country-flag.flag-mo {background-position: -48px -99px; +} +.country-flag.flag-mp {background-position: -64px -99px; +} +.country-flag.flag-mq {background-position: -80px -99px; +} +.country-flag.flag-mr {background-position: -96px -99px; +} +.country-flag.flag-ms {background-position: -112px -99px; +} +.country-flag.flag-mt {background-position: -128px -99px; +} +.country-flag.flag-mu {background-position: -144px -99px; +} +.country-flag.flag-mv {background-position: -160px -99px; +} +.country-flag.flag-mw {background-position: -176px -99px; +} +.country-flag.flag-mx {background-position: -192px -99px; +} +.country-flag.flag-my {background-position: -208px -99px; +} +.country-flag.flag-mz {background-position: -224px -99px; +} +.country-flag.flag-na {background-position: -240px -99px; +} +.country-flag.flag-nc {background-position: 0 -110px; +} +.country-flag.flag-ne {background-position: -16px -110px; +} +.country-flag.flag-nf {background-position: 
-32px -110px; +} +.country-flag.flag-ng {background-position: -48px -110px; +} +.country-flag.flag-ni {background-position: -64px -110px; +} +.country-flag.flag-nl {background-position: -80px -110px; +} +.country-flag.flag-no {background-position: -96px -110px; +} +.country-flag.flag-np {background-position: -112px -110px; +} +.country-flag.flag-nr {background-position: -128px -110px; +} +.country-flag.flag-nu {background-position: -144px -110px; +} +.country-flag.flag-nz {background-position: -160px -110px; +} +.country-flag.flag-om {background-position: -176px -110px; +} +.country-flag.flag-pa {background-position: -192px -110px; +} +.country-flag.flag-pe {background-position: -208px -110px; +} +.country-flag.flag-pf {background-position: -224px -110px; +} +.country-flag.flag-pg {background-position: -240px -110px; +} +.country-flag.flag-ph {background-position: 0 -121px; +} +.country-flag.flag-pk {background-position: -16px -121px; +} +.country-flag.flag-pl {background-position: -32px -121px; +} +.country-flag.flag-pm {background-position: -48px -121px; +} +.country-flag.flag-pn {background-position: -64px -121px; +} +.country-flag.flag-pr {background-position: -80px -121px; +} +.country-flag.flag-ps {background-position: -96px -121px; +} +.country-flag.flag-pt {background-position: -112px -121px; +} +.country-flag.flag-pw {background-position: -128px -121px; +} +.country-flag.flag-py {background-position: -144px -121px; +} +.country-flag.flag-qa {background-position: -160px -121px; +} +.country-flag.flag-re {background-position: -176px -121px; +} +.country-flag.flag-ro {background-position: -192px -121px; +} +.country-flag.flag-rs {background-position: -208px -121px; +} +.country-flag.flag-ru {background-position: -224px -121px; +} +.country-flag.flag-rw {background-position: -240px -121px; +} +.country-flag.flag-sa {background-position: 0 -132px; +} +.country-flag.flag-sb {background-position: -16px -132px; +} +.country-flag.flag-sc {background-position: -32px -132px; +} +.country-flag.flag-scotland {background-position: -48px -132px; +} +.country-flag.flag-sd {background-position: -64px -132px; +} +.country-flag.flag-se {background-position: -80px -132px; +} +.country-flag.flag-sg {background-position: -96px -132px; +} +.country-flag.flag-sh {background-position: -112px -132px; +} +.country-flag.flag-si {background-position: -128px -132px; +} +.country-flag.flag-sk {background-position: -144px -132px; +} +.country-flag.flag-sl {background-position: -160px -132px; +} +.country-flag.flag-sm {background-position: -176px -132px; +} +.country-flag.flag-sn {background-position: -192px -132px; +} +.country-flag.flag-so {background-position: -208px -132px; +} +.country-flag.flag-somaliland {background-position: -224px -132px; +} +.country-flag.flag-sr {background-position: -240px -132px; +} +.country-flag.flag-ss {background-position: 0 -143px; +} +.country-flag.flag-st {background-position: -16px -143px; +} +.country-flag.flag-sv {background-position: -32px -143px; +} +.country-flag.flag-sx {background-position: -48px -143px; +} +.country-flag.flag-sy {background-position: -64px -143px; +} +.country-flag.flag-sz {background-position: -80px -143px; +} +.country-flag.flag-tc {background-position: -96px -143px; +} +.country-flag.flag-td {background-position: -112px -143px; +} +.country-flag.flag-tf {background-position: -128px -143px; +} +.country-flag.flag-tg {background-position: -144px -143px; +} +.country-flag.flag-th {background-position: -160px -143px; +} +.country-flag.flag-tj 
{background-position: -176px -143px; +} +.country-flag.flag-tk {background-position: -192px -143px; +} +.country-flag.flag-tl {background-position: -208px -143px; +} +.country-flag.flag-tm {background-position: -224px -143px; +} +.country-flag.flag-tn {background-position: -240px -143px; +} +.country-flag.flag-to {background-position: 0 -154px; +} +.country-flag.flag-tr {background-position: -16px -154px; +} +.country-flag.flag-tt {background-position: -32px -154px; +} +.country-flag.flag-tv {background-position: -48px -154px; +} +.country-flag.flag-tw {background-position: -64px -154px; +} +.country-flag.flag-tz {background-position: -80px -154px; +} +.country-flag.flag-ua {background-position: -96px -154px; +} +.country-flag.flag-ug {background-position: -112px -154px; +} +.country-flag.flag-um {background-position: -128px -154px; +} +.country-flag.flag-us {background-position: -144px -154px; +} +.country-flag.flag-uy {background-position: -160px -154px; +} +.country-flag.flag-uz {background-position: -176px -154px; +} +.country-flag.flag-va {background-position: -192px -154px; +} +.country-flag.flag-vc {background-position: -208px -154px; +} +.country-flag.flag-ve {background-position: -224px -154px; +} +.country-flag.flag-vg {background-position: -240px -154px; +} +.country-flag.flag-vi {background-position: 0 -165px; +} +.country-flag.flag-vn {background-position: -16px -165px; +} +.country-flag.flag-vu {background-position: -32px -165px; +} +.country-flag.flag-wales {background-position: -48px -165px; +} +.country-flag.flag-wf {background-position: -64px -165px; +} +.country-flag.flag-ws {background-position: -80px -165px; +} +.country-flag.flag-ye {background-position: -96px -165px; +} +.country-flag.flag-yt {background-position: -112px -165px; +} +.country-flag.flag-za {background-position: -128px -165px; +} +.country-flag.flag-zanzibar {background-position: -144px -165px; +} +.country-flag.flag-zm {background-position: -160px -165px; +} +.country-flag.flag-zw {background-position: -176px -165px; +} diff --git a/gui/slick/css/dark.css b/gui/slick/css/dark.css index 3036a9bd53..402920c417 100644 --- a/gui/slick/css/dark.css +++ b/gui/slick/css/dark.css @@ -4,34 +4,34 @@ inc_top.mako .ui-dialog, .ui-dialog-buttonpane { - background: #2a2a2a !important; + background: rgb(42, 42, 42) !important; } .ui-widget-content { - background: #606060; - border: 1px solid #111; - color: #fff; + background: rgb(96, 96, 96); + border: 1px solid rgb(17, 17, 17); + color: rgb(255, 255, 255); } .ui-widget-content a { - color: #fff; + color: rgb(255, 255, 255); } .ui-widget-content a:hover { - color: #09A2FF; + color: rgb(9, 162, 255); text-decoration: none; } .ui-widget-header { - background: #3d3d3d; - border: 1px solid #111; - color: #fff; + background: rgb(61, 61, 61); + border: 1px solid rgb(17, 17, 17); + color: rgb(255, 255, 255); } .ui-state-default, .ui-widget-content .ui-state-default, .ui-widget-header .ui-state-default { - border: 1px solid #111; + border: 1px solid rgb(17, 17, 17); } .ui-state-hover, @@ -40,13 +40,13 @@ inc_top.mako .ui-state-focus, .ui-widget-content .ui-state-focus, .ui-widget-header .ui-state-focus { - background: #3d3d3d; + background: rgb(61, 61, 61); } .ui-state-active, .ui-widget-content .ui-state-active, .ui-widget-header .ui-state-active { - background: #3d3d3d; + background: rgb(61, 61, 61); } .ui-icon, @@ -59,31 +59,31 @@ inc_top.mako } .ui-widget-overlay { - background: #000000 url("../css/lib/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat-x; + 
background: rgb(0, 0, 0) url("../css/lib/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat-x; } .ui-state-default a, .ui-state-default a:link, .ui-state-default a:visited { - color: #fff; + color: rgb(255, 255, 255); text-decoration: none; } .ui-dialog .ui-dialog-titlebar-close { - background: #333; + background: rgb(51, 51, 51); } .ui-tabs { - padding: 0px; + padding: 0; background: none; - border-width: 0px; + border-width: 0; } .ui-tabs .ui-tabs-panel { - background-color: #3d3d3d !important; - border: 1px solid #111 !important; + background-color: rgb(61, 61, 61) !important; + border: 1px solid rgb(17, 17, 17) !important; } .ui-tabs-nav > :not(.ui-tabs-active){ - background: #333; + background: rgb(51, 51, 51); border-top-left-radius: 5px; border-top-right-radius: 5px; } @@ -95,13 +95,13 @@ inc_bottom.mako .footer { width: 100%; padding: 20px 0; - color: #fff; + color: rgb(255, 255, 255); text-align: center; font-size: 12px; } .footerhighlight { - color: #09A2FF; + color: rgb(9, 162, 255); display: inline; } @@ -110,10 +110,9 @@ home.mako ========================================================================== */ .imgbanner .banner { - border: 1px solid #111; + border: 1px solid rgb(17, 17, 17); overflow: hidden; height: 66px; - overflow: hidden; border-radius: 8px; vertical-align: top; width: 360px; @@ -128,7 +127,7 @@ home.mako border-radius: 3px; vertical-align: middle; width: 45px; - border: 1px solid #111; + border: 1px solid rgb(17, 17, 17); margin-right: 5px; } @@ -139,10 +138,10 @@ home.mako height: 100%; overflow: visible; text-align: center; - text-shadow: 0 0 0.1em #000; + text-shadow: 0 0 0.1em rgb(0, 0, 0); vertical-align: middle; font-size: 12px; - color: #fff; + color: rgb(255, 255, 255); } .loading-spinner { @@ -150,8 +149,8 @@ home.mako } .show-container { - background-color: #333; - border-color: #333; + background-color: rgb(51, 51, 51); + border-color: rgb(51, 51, 51); } .show-title:after { @@ -185,26 +184,26 @@ home.mako } td.tvShow a { - color: #fff; + color: rgb(255, 255, 255); text-decoration: none; } td.tvShow a:hover { cursor: pointer; - color: #09A2FF; + color: rgb(9, 162, 255); } .popover { margin-left: -50px; - background-color: #333; + background-color: rgb(51, 51, 51); } .popover-content { - background-color: #333; + background-color: rgb(51, 51, 51); } .popover.bottom .arrow:after { - border-bottom-color: #333; + border-bottom-color: rgb(51, 51, 51); } /* ======================================================================= @@ -220,8 +219,8 @@ home_newShow.mako padding: 8px; overflow: hidden; font-size: 14px; - background-color: #3d3d3d; - border: 1px solid #111; + background-color: rgb(61, 61, 61); + border: 1px solid rgb(17, 17, 17); } /* ======================================================================= @@ -233,7 +232,7 @@ ul#rootDirStaticList li { margin: 2px; list-style: none outside none; cursor: pointer; - background: #3d3d3d; + background: rgb(61, 61, 61); } /* ======================================================================= @@ -243,8 +242,8 @@ home_trendingShows.mako .traktContainer { margin: 12px; width: 188px; - background-color: #333; - border: 1px solid #111; + background-color: rgb(51, 51, 51); + border: 1px solid rgb(17, 17, 17); border-radius: 6px; } @@ -265,24 +264,24 @@ h1.title { line-height: 30px; text-align: left; text-rendering: optimizelegibility; - border-bottom: 1px solid #555; + border-bottom: 1px solid rgb(85, 85, 85); } ul.tags li { margin-right: 4px; margin-bottom: 5px; padding: 3px 4px 3px 25px; - 
background: url(../images/tag.png) no-repeat scroll 5px 4px #15528F; + background: url(../images/tag.png) no-repeat scroll 5px 4px rgb(21, 82, 143); border-radius: 3px; - border: 1px solid #111; - color: #FFF; + border: 1px solid rgb(17, 17, 17); + color: rgb(255, 255, 255); font: 14px/18px "Open Sans", "Helvetica Neue", Helvetica, Arial, Geneva, sans-serif; - text-shadow: 0px 1px rgba(0, 0, 0, 0.8); + text-shadow: 0 1px rgba(0, 0, 0, 0.8); float: left; } .tvshowImg { - border: 1px solid #111; + border: 1px solid rgb(17, 17, 17); border-radius: 5px; height: 311px; width: auto; @@ -291,8 +290,8 @@ ul.tags li { #summary { padding: 10px; - background-color: #3d3d3d; - border: 1px solid #111; + background-color: rgb(61, 61, 61); + border: 1px solid rgb(17, 17, 17); width: 100%; height: 250px; overflow: auto; @@ -307,27 +306,27 @@ ul.tags li { text-align: center; border: none; empty-cells: show; - color: #000; + color: rgb(0, 0, 0); } .sickbeardTable th{ - color: #fff; + color: rgb(255, 255, 255); text-align: center; - background-color: #15528F; + background-color: rgb(21, 82, 143); white-space: nowrap; } .sickbeardTable th, .sickbeardTable td { - border-top: 1px solid #222; - border-left: 1px solid #222; + border-top: 1px solid rgb(34, 34, 34); + border-left: 1px solid rgb(34, 34, 34); padding: 4px; } th.row-seasonheader { border: none; - background-color: #222; - color: #fff; + background-color: rgb(34, 34, 34); + color: rgb(255, 255, 255); padding-top: 15px; text-align: left; } @@ -356,54 +355,54 @@ h2.day, h2.network { line-height: 36px; font-weight: bold; letter-spacing: 1px; - color: #FFF; + color: rgb(255, 255, 255); text-align: center; - text-shadow: -1px -1px 0px rgba(0, 0, 0, 0.3); - background-color: #15528F; + text-shadow: -1px -1px 0 rgba(0, 0, 0, 0.3); + background-color: rgb(21, 82, 143); } .tvshowDiv { display: block; clear: both; - border: 1px solid #ccc; + border: 1px solid rgb(204, 204, 204); margin: auto; - padding: 0px; + padding: 0; text-align: left; width: 750px; border-radius: 5px; - background: #fff; + background: rgb(255, 255, 255); cursor: default; overflow: hidden; - color: #000; + color: rgb(0, 0, 0); } .tvshowDiv a:hover { - color: #09A2FF; + color: rgb(9, 162, 255); } #showListTable td.tvShow a { - color: #000; + color: rgb(0, 0, 0); } #showListTable td.tvShow a:hover { cursor: pointer; - color: #09A2FF; + color: rgb(9, 162, 255); } table.cal-odd { - background-color: #333; + background-color: rgb(51, 51, 51); } table.cal-even { - background-color: #3d3d3d; + background-color: rgb(61, 61, 61); } .calendarShow .text .airtime { - color:#fff + color: rgb(255, 255, 255); } .calendarShow .text .episode-title { - color:#aaa + color: rgb(170, 170, 170); } /* ======================================================================= @@ -412,14 +411,14 @@ config*.mako .component-group { padding: 15px 15px 25px; - border-bottom: 1px dotted #555; + border-bottom: 1px dotted rgb(85, 85, 85); min-height: 200px; } .component-group-desc p { width: 90%; margin: 10px 0; - color: #ddd; + color: rgb(221, 221, 221); } #provider_order_list li, @@ -427,16 +426,16 @@ config*.mako padding: 5px; margin: 5px 0; font-size: 14px; - background: #333 !important; - color: #fff; + background: rgb(51, 51, 51) !important; + color: rgb(255, 255, 255); } #provider_order_list .ui-state-default.torrent-provider { - background-color: #555 !important; + background-color: rgb(85, 85, 85) !important; } #provider_order_list .ui-state-default.nzb-provider { - background-color: #222 !important; + 
background-color: rgb(34, 34, 34) !important; } /* ======================================================================= @@ -445,16 +444,16 @@ config_postProcessing.mako #config div.example { padding: 10px; - background-color: #333333; - border: 1px solid #111; + background-color: rgb(51, 51, 51); + border: 1px solid rgb(17, 17, 17); } .Key { width: 100%; padding: 6px; font-size: 13px; - background-color: #3d3d3d; - border: 1px solid #111; + background-color: rgb(61, 61, 61); + border: 1px solid rgb(17, 17, 17); border-collapse: collapse; border-spacing: 0; } @@ -462,17 +461,17 @@ config_postProcessing.mako .Key th, .tableHeader { padding: 3px 9px; margin: 0; - color: #fff; + color: rgb(255, 255, 255); text-align: center; - background: #15528F; + background: rgb(21, 82, 143); } .Key tr { - border-bottom: 1px solid #111; + border-bottom: 1px solid rgb(17, 17, 17); } .Key tr.even { - background-color: #333; + background-color: rgb(51, 51, 51); } /* ======================================================================= @@ -482,14 +481,14 @@ config_notifications.mako div.metadata_options { padding: 7px; overflow: auto; - background: #333; - color: #fff; - border: 1px solid #111; + background: rgb(51, 51, 51); + color: rgb(255, 255, 255); + border: 1px solid rgb(17, 17, 17); } div.metadata_options label:hover { - color: #fff; - background-color: #15528F; + color: rgb(255, 255, 255); + background-color: rgb(21, 82, 143); cursor: pointer; } @@ -497,13 +496,13 @@ div.metadata_options label { display: block; padding-left: 7px; line-height: 20px; - color: #fff; + color: rgb(255, 255, 255); } div.metadata_example label { display: block; line-height: 21px; - color: #fff; + color: rgb(255, 255, 255); cursor: pointer; } @@ -513,7 +512,7 @@ manage*.mako .separator { font-size: 90%; - color: #fff; + color: rgb(255, 255, 255); } a.whitelink { @@ -525,8 +524,8 @@ Global span.path { padding: 3px 6px; - color: #09A2FF; - background-color: #333; + color: rgb(9, 162, 255); + background-color: rgb(51, 51, 51); } /* ======================================================================= @@ -537,71 +536,71 @@ body { padding-top: 60px; overflow-y: scroll; font-family: "Open Sans", "Helvetica Neue", Helvetica, Arial, sans-serif; - color: #fff; - background-color: #222; + color: rgb(255, 255, 255); + background-color: rgb(34, 34, 34); } /* navbar styling */ .navbar-default { - background-color: #15528F; + background-color: rgb(21, 82, 143); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#297AB8', endColorstr='#15528F'); - background: -webkit-gradient(linear, left top, left bottom, from(#297AB8), to(#15528F)); - background: -moz-linear-gradient(top, #297AB8, #15528F); - border-color: #3e3f3a; + background: -webkit-gradient(linear, left top, left bottom, from(rgb(41, 122, 184)), to(rgb(21, 82, 143))); + background: -moz-linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); + border-color: rgb(62, 63, 58); } .navbar-default .navbar-nav > li > a:hover, .navbar-default .navbar-nav > li > a:focus { - background-color: #124477; + background-color: rgb(18, 68, 119); } .navbar-default .navbar-nav > .active > a, .navbar-default .navbar-nav > .active > a:hover, .navbar-default .navbar-nav > .active > a:focus { - color: #ffffff; - background-color: #124477; + color: rgb(255, 255, 255); + background-color: rgb(18, 68, 119); } .navbar-default .navbar-toggle:hover, .navbar-default .navbar-toggle:focus { - background-color: #124477; + background-color: rgb(18, 68, 119); } .navbar-default .navbar-toggle 
.icon-bar { - background-color: #124477; + background-color: rgb(18, 68, 119); } .navbar-default .navbar-nav > .open > a, .navbar-default .navbar-nav > .open > a:hover, .navbar-default .navbar-nav > .open > a:focus { - background-color: #124477; - color: #ffffff; + background-color: rgb(18, 68, 119); + color: rgb(255, 255, 255); } @media (max-width: 767px) { .navbar-default .navbar-nav .open .dropdown-menu > .active > a, .navbar-default .navbar-nav .open .dropdown-menu > .active > a:hover, .navbar-default .navbar-nav .open .dropdown-menu > .active > a:focus { - color: #ffffff; - background-color: #124477; + color: rgb(255, 255, 255); + background-color: rgb(18, 68, 119); } } .dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus { - color: #fff; + color: rgb(255, 255, 255); text-decoration: none; - background-color: #15528F; + background-color: rgb(21, 82, 143); } .dropdown-menu > li > a { padding: 4px 36px 4px 20px; - color: #fff; + color: rgb(255, 255, 255); } .dropdown-menu { - background-color: #333; + background-color: rgb(51, 51, 51); border: 1px solid rgba(0, 0, 0, 0.15); - box-shadow: 0px 6px 12px rgba(0, 0, 0, 0.176); + box-shadow: 0 6px 12px rgba(0, 0, 0, 0.176); } .btn { @@ -613,25 +612,25 @@ body { font-size: 12px; line-height: 16px; *line-height: 20px; - color: #fff; + color: rgb(255, 255, 255); text-align: center; text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); vertical-align: middle; cursor: pointer; - background-color: #2672B6; - *background-color: #2672B6; - background-image: -ms-linear-gradient(top, #297AB8, #15528F); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#297AB8), to(#15528F)); - background-image: -webkit-linear-gradient(top, #297AB8, #15528F); - background-image: -o-linear-gradient(top, #297AB8, #15528F); - background-image: linear-gradient(top, #297AB8, #15528F); - background-image: -moz-linear-gradient(top, #297AB8, #15528F); + background-color: rgb(38, 114, 182); + *background-color: rgb(38, 114, 182); + background-image: -ms-linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(41, 122, 184)), to(rgb(21, 82, 143))); + background-image: -webkit-linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); + background-image: -o-linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); + background-image: linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); + background-image: -moz-linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); background-repeat: repeat-x; - border: 1px solid #111; + border: 1px solid rgb(17, 17, 17); *border: 0; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); - border-color: #111 #111 #111; - border-bottom-color: #111; + border-color: rgb(17, 17, 17) rgb(17, 17, 17) rgb(17, 17, 17); + border-bottom-color: rgb(17, 17, 17); -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; @@ -648,22 +647,22 @@ body { .btn.active, .btn.disabled, .btn[disabled] { - background-color: #2672B6; - *background-color: #2672B6; - color: #fff; + background-color: rgb(38, 114, 182); + *background-color: rgb(38, 114, 182); + color: rgb(255, 255, 255); } .btn:active, .btn.active { - background-color: #cccccc \9; - color: #fff; + background-color: rgb(204, 204, 204) \9; + color: rgb(255, 255, 255); } .btn:hover { - color: #fff; + color: rgb(255, 255, 255); text-decoration: none; - background-color: #2672B6; - *background-color: #2672B6; + background-color: rgb(38, 114, 182); + *background-color: rgb(38, 114, 182); 
background-position: 0 -150px; -webkit-transition: background-position 0.0s linear; -moz-transition: background-position 0.0s linear; @@ -673,18 +672,18 @@ body { } .btn:focus { - outline: thin dotted #333; + outline: thin dotted rgb(51, 51, 51); outline: 5px auto -webkit-focus-ring-color; outline-offset: -2px; - color: #fff; + color: rgb(255, 255, 255); } .btn.active, .btn:active { - background-color: #2672B6; - background-color: #2672B6 \9; + background-color: rgb(38, 114, 182); + background-color: rgb(38, 114, 182) \9; background-image: none; - color: #fff; + color: rgb(255, 255, 255); outline: 0; -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); @@ -694,7 +693,7 @@ body { .btn.disabled, .btn[disabled] { cursor: default; - background-color: #15528F; + background-color: rgb(21, 82, 143); background-image: none; opacity: 0.65; filter: alpha(opacity=65); @@ -710,16 +709,16 @@ body { right: 12px; display: inline-block; border-right: 6px solid transparent; - border-bottom: 6px solid #333; + border-bottom: 6px solid rgb(51, 51, 51); border-left: 6px solid transparent; content: ""; } } pre { - color: #fff; - background-color: #3d3d3d; - border-color: #111; + color: rgb(255, 255, 255); + background-color: rgb(61, 61, 61); + border-color: rgb(17, 17, 17); } /* ======================================================================= @@ -730,11 +729,11 @@ browser.css overrides margin: 2px 0; list-style-type: none; cursor: pointer; - background: #333 !important; + background: rgb(51, 51, 51) !important; } .fileBrowserDialog ul li a:hover { - color: #09a2ff; + color: rgb(9, 162, 255); background: none !important; } @@ -743,25 +742,25 @@ formWizard.css ========================================================================== */ legend.legendStep { - color: #ffffff; - margin-bottom: 0px; + color: rgb(255, 255, 255); + margin-bottom: 0; } div.stepsguide .step p { margin: 12px 0; - border-bottom: 4px solid #23AFDC; + border-bottom: 4px solid rgb(35, 175, 220); } div.stepsguide .disabledstep p { - border-bottom: 4px solid #1178B3; + border-bottom: 4px solid rgb(17, 120, 179); } div.formpaginate .prev, div.formpaginate .next { padding: 3px 6px; - color: #fff; + color: rgb(255, 255, 255); cursor: hand; cursor: pointer; - background: #2265A1; + background: rgb(34, 101, 161); -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; @@ -772,25 +771,25 @@ pnotify.css ========================================================================== */ .ui-pnotify-container { - border: 1px solid #111; - background-image: -moz-linear-gradient(#333, #3d3d3d) !important; - background-image: linear-gradient(#333, #3d3d3d) !important; - background-image: -webkit-linear-gradient(#333, #3d3d3d) !important; - background-image: -o-linear-gradient(#333, #3d3d3d) !important; - filter: progid:dximagetransform.microsoft.gradient(startColorstr=#333, endColorstr=#3d3d3d) !important; - -ms-filter: progid:dximagetransform.microsoft.gradient(startColorstr=#333, endColorstr=#3d3d3d) !important; - -moz-box-shadow: 0px 0px 2px #000; - -webkit-box-shadow: 0px 0px 2px #000; - -o-box-shadow: 0px 0px 2px #000; - box-shadow: 0px 0px 2px #000; + border: 1px solid rgb(17, 17, 17); + background-image: -moz-linear-gradient(rgb(51, 51, 51), rgb(61, 61, 61)) !important; + background-image: linear-gradient(rgb(51, 51, 51), rgb(61, 61, 61)) !important; + background-image: -webkit-linear-gradient(rgb(51, 51, 51), 
rgb(61, 61, 61)) !important; + background-image: -o-linear-gradient(rgb(51, 51, 51), rgb(61, 61, 61)) !important; + filter: progid:dximagetransform.microsoft.gradient(startColorstr=rgb(51, 51, 51), endColorstr=rgb(61, 61, 61)) !important; + -ms-filter: progid:dximagetransform.microsoft.gradient(startColorstr=rgb(51, 51, 51), endColorstr=rgb(61, 61, 61)) !important; + -moz-box-shadow: 0 0 2px rgb(0, 0, 0); + -webkit-box-shadow: 0 0 2px rgb(0, 0, 0); + -o-box-shadow: 0 0 2px rgb(0, 0, 0); + box-shadow: 0 0 2px rgb(0, 0, 0); } .ui-pnotify-title { - color: #ffffff; + color: rgb(255, 255, 255); } .ui-pnotify-text { - color: #ffffff; + color: rgb(255, 255, 255); } /* ======================================================================= @@ -801,60 +800,60 @@ tablesorter.css width: 100%; margin-right: auto; margin-left: auto; - color: #fff; + color: rgb(255, 255, 255); text-align: left; - background-color: #333; + background-color: rgb(51, 51, 51); border-spacing: 0; } .tablesorter th, .tablesorter td { padding: 4px; - border-top: #222 1px solid; - border-left: #222 1px solid; + border-top: rgb(34, 34, 34) 1px solid; + border-left: rgb(34, 34, 34) 1px solid; vertical-align: middle; } .tablesorter th { - color: #fff; + color: rgb(255, 255, 255); text-align: center; text-shadow: -1px -1px 0 rgba(0,0,0,0.3); - background-color: #15528F; + background-color: rgb(21, 82, 143); border-collapse: collapse; font-weight: normal; } .tablesorter thead .tablesorter-headerDesc { - background-color: #297AB8; + background-color: rgb(41, 122, 184); background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAAP///////yH5BAEAAAEALAAAAAAVAAQAAAINjB+gC+jP2ptn0WskLQA7); /* background-image: url(../images/tablesorter/asc.gif); */ } .tablesorter thead .tablesorter-headerAsc { - background-color: #297AB8; + background-color: rgb(41, 122, 184); background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAAP///////yH5BAEAAAEALAAAAAAVAAQAAAINjI8Bya2wnINUMopZAQA7); /* background-image: url(../images/tablesorter/desc.gif); */ } thead.tablesorter-stickyHeader { - border-top: 2px solid #222; - border-bottom: 2px solid #222; + border-top: 2px solid rgb(34, 34, 34); + border-bottom: 2px solid rgb(34, 34, 34); } /* Zebra Widget - row alternating colors */ .tablesorter tr.odd, .sickbeardTable tr.odd { - background-color: #333333; + background-color: rgb(51, 51, 51); } .tablesorter tr.even, .sickbeardTable tr.even { - background-color: #2e2e2e; + background-color: rgb(46, 46, 46); } .tablesorter tr.tablesorter-filter-row, .tablesorter tr.tablesorter-filter-row td { text-align: center; - background: #333; - border-bottom: 1px solid #111; + background: rgb(51, 51, 51); + border-bottom: 1px solid rgb(17, 17, 17); } /* hidden filter row */ @@ -881,7 +880,7 @@ thead.tablesorter-stickyHeader { } #showListTable tbody { - color: #000; + color: rgb(0, 0, 0); } /* ======================================================================= @@ -889,11 +888,11 @@ token-input.css ========================================================================== */ div.token-input-dropdown { - background-color: #fff; - color: #000; - border-left-color: #ccc; - border-right-color: #ccc; - border-bottom-color: #ccc; + background-color: rgb(255, 255, 255); + color: rgb(0, 0, 0); + border-left-color: rgb(204, 204, 204); + border-right-color: rgb(204, 204, 204); + border-bottom-color: rgb(204, 204, 204); } /* ======================================================================= @@ -901,29 +900,29 @@ jquery.confirm.css 
========================================================================== */ #confirmBox{ - background: #222; + background: rgb(34, 34, 34); width: 460px; position: fixed; left: 50%; top: 50%; margin: -130px 0 0 -230px; - border: 1px solid #111; - box-shadow: 0px 0px 12px 0px rgba(0, 0, 0, 0.175); + border: 1px solid rgb(17, 17, 17); + box-shadow: 0 0 12px 0 rgba(0, 0, 0, 0.175); } #confirmBox h1 { - background-color: #15528F; - border-bottom: 1px solid #111; - color: #fff; + background-color: rgb(21, 82, 143); + border-bottom: 1px solid rgb(17, 17, 17); + color: rgb(255, 255, 255); margin: 0; font-size: 22px; - text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); + text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); } #confirmBox p { padding-top: 20px; - color: #fff; - text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); + color: rgb(255, 255, 255); + text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); } /* ======================================================================= @@ -931,17 +930,17 @@ bootstrap modal ========================================================================== */ .modal-content { - background-color: #3D3D3D; + background-color: rgb(61, 61, 61); } .modal-body { - background-color: #3D3D3D; + background-color: rgb(61, 61, 61); } .modal-header { padding:9px 15px; - border-bottom:1px solid #eee; - background-color: #15528F; + border-bottom:1px solid rgb(238, 238, 238); + background-color: rgb(21, 82, 143); -webkit-border-top-left-radius: 5px; -webkit-border-top-right-radius: 5px; -moz-border-radius-topleft: 5px; @@ -955,13 +954,13 @@ bootstrap panel ========================================================================== */ .panel-default { - background-color: #3D3D3D; - border-color: #111111; + background-color: rgb(61, 61, 61); + border-color: rgb(17, 17, 17); } .panel-heading { - background-color: #3D3D3D !important; - color: #FFFFFF !important; + background-color: rgb(61, 61, 61) !important; + color: rgb(255, 255, 255) !important; } @@ -975,22 +974,22 @@ new #confirmBox left: 50%; top: 50%; margin: -130px 0 0 -230px; - border: 1px solid #111; - box-shadow: 0px 0px 12px 0px rgba(0, 0, 0, 0.175); - border-radius: 0px; + border: 1px solid rgb(17, 17, 17); + box-shadow: 0 0 12px 0 rgba(0, 0, 0, 0.175); + border-radius: 0; } .modal-content { - border-radius: 0px; + border-radius: 0; } .modal-header { border-bottom: none; - border-radius: 0px; + border-radius: 0; } .modal-body, .modal-content{ - background: #222; + background: rgb(34, 34, 34); } .modal-footer { @@ -1007,7 +1006,7 @@ new #confirmBox } .modal-header .close { - display: none + display: none; } .modal-footer button { @@ -1015,11 +1014,11 @@ new #confirmBox padding: 2px 15px; text-decoration: none; display: inline-block; - color: #fff; + color: rgb(255, 255, 255); text-align:center; - text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); + text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); background-clip: padding-box; - border: 1px solid #111; + border: 1px solid rgb(17, 17, 17); border-radius: 3px; cursor: pointer; -webkit-box-sizing: border-box; @@ -1038,17 +1037,17 @@ new #confirmBox } .modal-footer button.confirm { - background-color: #3F7636; + background-color: rgb(63, 118, 54); } .modal-footer button.confirm:hover { - background-color: #48873E; + background-color: rgb(72, 135, 62); } .modal-footer button.cancel { - background-color: #8D2D2B; + background-color: rgb(141, 45, 43); } .modal-footer button.cancel:hover { - background-color: #A13331; + background-color: rgb(161, 51, 49); } diff --git a/gui/slick/css/light.css 
b/gui/slick/css/light.css index 1fbaed69c0..0d9c7a7563 100644 --- a/gui/slick/css/light.css +++ b/gui/slick/css/light.css @@ -3,50 +3,50 @@ home.mako ========================================================================== */ .progress-100 { - background-image: -moz-linear-gradient(#a6cf41, #5b990d) !important; - background-image: linear-gradient(#a6cf41, #5b990d) !important; - background-image: -webkit-linear-gradient(#a6cf41, #5b990d) !important; - background-image: -o-linear-gradient(#a6cf41, #5b990d) !important; + background-image: -moz-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; + background-image: linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; + background-image: -webkit-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; + background-image: -o-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-80 { - background-image: -moz-linear-gradient(#e1ff97, #9db269) !important; - background-image: linear-gradient(#e1ff97, #9db269) !important; - background-image: -webkit-linear-gradient(#e1ff97, #9db269) !important; - background-image: -o-linear-gradient(#e1ff97, #9db269) !important; + background-image: -moz-linear-gradient(rgb(225, 255, 151), rgb(157, 178, 105)) !important; + background-image: linear-gradient(rgb(225, 255, 151), rgb(157, 178, 105)) !important; + background-image: -webkit-linear-gradient(rgb(225, 255, 151), rgb(157, 178, 105)) !important; + background-image: -o-linear-gradient(rgb(225, 255, 151), rgb(157, 178, 105)) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-60 { - background-image: -moz-linear-gradient(#fad440, #f2a70d) !important; - background-image: linear-gradient(#fad440, #f2a70d) !important; - background-image: -webkit-linear-gradient(#fad440, #f2a70d) !important; - background-image: -o-linear-gradient(#fad440, #f2a70d) !important; + background-image: -moz-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; + background-image: linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; + background-image: -webkit-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; + background-image: -o-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-40 { - background-image: -moz-linear-gradient(#fab543, #f2700d) !important; - background-image: linear-gradient(#fab543, #f2700d) !important; - background-image: -webkit-linear-gradient(#fab543, #f2700d) !important; - background-image: -o-linear-gradient(#fab543, #f2700d) !important; + background-image: -moz-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; + background-image: linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; + background-image: -webkit-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; + background-image: -o-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-20 { - background-image: -moz-linear-gradient(#da5945, #b11a10) !important; - background-image: linear-gradient(#da5945, #b11a10) !important; - background-image: -webkit-linear-gradient(#da5945, #b11a10) !important; - background-image: -o-linear-gradient(#da5945, #b11a10) !important; + background-image: -moz-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; + 
background-image: linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; + background-image: -webkit-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; + background-image: -o-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; @@ -58,8 +58,8 @@ displayShow.mako .displayShowTable th.row-seasonheader { border: none !important; - background-color: #fff !important; - color: #000 !important; + background-color: rgb(255, 255, 255) !important; + color: rgb(0, 0, 0) !important; padding-top: 15px !important; text-align: left !important; } @@ -69,7 +69,7 @@ testRename.mako ========================================================================== */ tr.seasonheader { - color: #000 !important; + color: rgb(0, 0, 0) !important; } /* ======================================================================= @@ -77,19 +77,19 @@ schedule.mako ========================================================================== */ table.cal-odd { - background-color: #ddd; + background-color: rgb(221, 221, 221); } table.cal-even { - background-color: #d2d2d2; + background-color: rgb(210, 210, 210); } .calendarShow .text .airtime { - color:#000 + color: rgb(0, 0, 0); } .calendarShow .text .episode-title { - color:#888 + color: rgb(136, 136, 136); } /* ======================================================================= @@ -98,33 +98,33 @@ bootstrap Overrides /* navbar styling */ .navbar-default { - background-color: #333333; + background-color: rgb(51, 51, 51); filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#555555', endColorstr='#333333'); - background: -webkit-gradient(linear, left top, left bottom, from(#555), to(#333)); - background: -moz-linear-gradient(top, #555, #333); - border-color: #3e3f3a; + background: -webkit-gradient(linear, left top, left bottom, from(rgb(85, 85, 85)), to(rgb(51, 51, 51))); + background: -moz-linear-gradient(top, rgb(85, 85, 85), rgb(51, 51, 51)); + border-color: rgb(62, 63, 58); } .navbar-default .navbar-collapse, .navbar-default .navbar-form { - border-color: #3e3f3a; + border-color: rgb(62, 63, 58); } .navbar-default .navbar-nav > li > a:hover, .navbar-default .navbar-nav > li > a:focus { - background-color: #333; + background-color: rgb(51, 51, 51); } .dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus { - color: #fff; + color: rgb(255, 255, 255); text-decoration: none; - background-color: #333; + background-color: rgb(51, 51, 51); } pre { - color: #000; - background-color: #F5F5F5; - border-color: #ccc; + color: rgb(0, 0, 0); + background-color: rgb(245, 245, 245); + border-color: rgb(204, 204, 204); } /* ======================================================================= @@ -135,9 +135,9 @@ tablesorter.css width: 100%; margin-right: auto; margin-left: auto; - color: #000; + color: rgb(0, 0, 0); text-align: left; - background-color: #ddd; + background-color: rgb(221, 221, 221); border-spacing: 0; } @@ -169,11 +169,11 @@ token-input.css ========================================================================== */ div.token-input-dropdown { - background-color: #fff; - color: #000; - border-left-color: #ccc; - border-right-color: #ccc; - border-bottom-color: #ccc; + background-color: rgb(255, 255, 255); + color: rgb(0, 0, 0); + border-left-color: rgb(204, 204, 204); + border-right-color: rgb(204, 204, 204); + border-bottom-color: rgb(204, 204, 204); } /* ======================================================================= @@ -182,8 +182,8 @@ 
bootstarp modal .modal-header { padding:9px 15px; - border-bottom:1px solid #eee; - background-color: #F5F1E4; + border-bottom:1px solid rgb(238, 238, 238); + background-color: rgb(245, 241, 228); -webkit-border-top-left-radius: 5px; -webkit-border-top-right-radius: 5px; -moz-border-radius-topleft: 5px; diff --git a/gui/slick/css/style.css b/gui/slick/css/style.css index be4e2d6002..fe9a25794d 100644 --- a/gui/slick/css/style.css +++ b/gui/slick/css/style.css @@ -1,3 +1,6 @@ +strong.warning { + color: red; +} /* ======================================================================= inc_top.mako ========================================================================== */ @@ -17,7 +20,7 @@ inc_top.mako .ui-autocomplete-loading { - background: white url("../images/loading16.gif") right center no-repeat; + background: rgb(255, 255, 255) url("../images/loading16.gif") right center no-repeat; } .browserDialog.busy .ui-dialog-buttonpane { @@ -30,7 +33,7 @@ inc_top.mako .ui-dialog, .ui-dialog-buttonpane { - background: #eceadf url("../css/lib/images/ui-bg_fine-grain_10_eceadf_60x60.png") 50% 50% repeat !important; + background: rgb(236, 234, 223) url("../css/lib/images/ui-bg_fine-grain_10_eceadf_60x60.png") 50% 50% repeat !important; } /* restore 1.8.x resize handle on dialog button pane */ @@ -44,7 +47,7 @@ inc_top.mako .ui-accordion-content, .ui-tabs-panel { - background: #ededed !important; + background: rgb(237, 237, 237) !important; background-image: none !important; } @@ -54,18 +57,18 @@ inc_top.mako } .ui-widget-content { - background: #dcdcdc url("../css/lib/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png") 50% top repeat-x; + background: rgb(220, 220, 220) url("../css/lib/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png") 50% top repeat-x; } .ui-widget-header { - background: #ffffff url("../css/lib/images/ui-bg_flat_0_ffffff_40x100.png") 50% 50% repeat-x; + background: rgb(255, 255, 255) url("../css/lib/images/ui-bg_flat_0_ffffff_40x100.png") 50% 50% repeat-x; } .ui-state-default, .ui-widget-content .ui-state-default, .ui-widget-header .ui-state-default { - background: #ffffff; - border: 1px solid #CCCCCC; + background: rgb(255, 255, 255); + border: 1px solid rgb(204, 204, 204); } .ui-state-hover, @@ -74,25 +77,25 @@ inc_top.mako .ui-state-focus, .ui-widget-content .ui-state-focus, .ui-widget-header .ui-state-focus { - background: #ffffff; + background: rgb(255, 255, 255); } .ui-state-active, .ui-widget-content .ui-state-active, .ui-widget-header .ui-state-active { - background: #F7F7F7; + background: rgb(247, 247, 247); } .ui-state-highlight, .ui-widget-content .ui-state-highlight, .ui-widget-header .ui-state-highlight { - background: #fbf9ee url("../css/lib/images/ui-bg_glass_55_fbf9ee_1x400.png") 50% 50% repeat-x; + background: rgb(251, 249, 238) url("../css/lib/images/ui-bg_glass_55_fbf9ee_1x400.png") 50% 50% repeat-x; } .ui-state-error, .ui-widget-content .ui-state-error, .ui-widget-header .ui-state-error { - background: #fef1ec url("../css/lib/images/ui-bg_glass_95_fef1ec_1x400.png") 50% 50% repeat-x; + background: rgb(254, 241, 236) url("../css/lib/images/ui-bg_glass_95_fef1ec_1x400.png") 50% 50% repeat-x; } .ui-icon, @@ -127,41 +130,41 @@ inc_top.mako } .ui-widget-overlay { - background: #aaaaaa url("../css/lib/images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; + background: rgb(170, 170, 170) url("../css/lib/images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; } .ui-widget-shadow { - background: #000000 url("../css/lib/images/ui-bg_flat_0_000000_40x100.png") 50% 50% 
repeat-x; + background: rgb(0, 0, 0) url("../css/lib/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat-x; } .ui-state-active a, .ui-state-active a:link, .ui-state-active a:visited { - color: #140F06; + color: rgb(20, 15, 6); text-decoration: none; } .ui-state-default a, .ui-state-default a:link, .ui-state-default a:visited { - color: #222; + color: rgb(34, 34, 34); text-decoration: none; } .ui-tabs { - padding: 0px; + padding: 0; background: none; - border-width: 0px; + border-width: 0; } .ui-tabs .ui-tabs-nav { - padding-left: 0px; + padding-left: 0; background: transparent; - border-width: 0px 0px 0px 0px; - -moz-border-radius: 0px; - -webkit-border-radius: 0px; - border-radius: 0px; + border-width: 0 0 0 0; + -moz-border-radius: 0; + -webkit-border-radius: 0; + border-radius: 0; } .ui-tabs .ui-tabs-panel { - background-color: #F7F7F7 !important; - border: 1px solid #CCCCCC !important; + background-color: rgb(247, 247, 247) !important; + border: 1px solid rgb(204, 204, 204) !important; padding: 1em; } @@ -197,7 +200,7 @@ inc_top.mako margin-left: auto; margin-right: auto; margin-top: 50px; - margin-bottom: 0px; + margin-bottom: 0; } [class^="menu-icon-"], [class*=" menu-icon-"] { @@ -211,155 +214,155 @@ inc_top.mako } .menu-icon-addshow { - background-position: 0px 0px; + background-position: 0 0; } .menu-icon-anime { - background-position: -21px 0px; + background-position: -21px 0; } .menu-icon-backlog-view { - background-position: -42px 0px; + background-position: -42px 0; } .menu-icon-backlog { - background-position: -63px 0px; + background-position: -63px 0; } .menu-icon-bittorrent { - background-position: -84px 0px; + background-position: -84px 0; } .menu-icon-config-index { - background-position: -105px 0px; + background-position: -105px 0; } .menu-icon-config { - background-position: -126px 0px; + background-position: -126px 0; } .menu-icon-failed-download { - background-position: -147px 0px; + background-position: -147px 0; } .menu-icon-home { - background-position: -168px 0px; + background-position: -168px 0; } .menu-icon-manage { - background-position: -189px 0px; + background-position: -189px 0; } .menu-icon-manage-searches { - background-position: -210px 0px; + background-position: -210px 0; } .menu-icon-poster { - background-position: -231px 0px; + background-position: -231px 0; } .menu-icon-postprocess { - background-position: -252px 0px; + background-position: -252px 0; } .menu-icon-restart { - background-position: -273px 0px; + background-position: -273px 0; } .menu-icon-shutdown { - background-position: -294px 0px; + background-position: -294px 0; } .menu-icon-update { - background-position: -315px 0px; + background-position: -315px 0; } .menu-icon-viewlog-errors { - background-position: -336px 0px; + background-position: -336px 0; } .menu-icon-viewlog { - background-position: -357px 0px; + background-position: -357px 0; } .menu-icon-kodi { - background-position: -378px 0px; + background-position: -378px 0; } .menu-icon-help { - background-position: -399px 0px; + background-position: -399px 0; } .menu-icon-info { - background-position: -418px 0px; + background-position: -418px 0; } .menu-icon-error { - background-position: -436px 0px; + background-position: -436px 0; } .menu-icon-news { - background-position: -456px 0px; + background-position: -456px 0; } .menu-icon-irc { - background-position: -478px 0px; + background-position: -478px 0; } .menu-icon-changelog { - background-position: -495px 0px; + background-position: -495px 0; } .menu-icon-support { - 
background-position: -516px 0px; + background-position: -516px 0; } .menu-icon-plex { - background-position: -536px 0px; + background-position: -536px 0; } .menu-icon-backup { - background-position: -556px 0px; + background-position: -556px 0; } .menu-icon-provider { - background-position: -576px 0px; + background-position: -576px 0; } .menu-icon-notification { - background-position: -597px 0px; + background-position: -597px 0; } .menu-icon-emby { - background-position: -614px 0px; + background-position: -614px 0; } .menu-icon-blackhole { - background-position: -632px 0px; + background-position: -632px 0; } .menu-icon-schedule { - background-position: -653px 0px; + background-position: -653px 0; } .menu-icon-manage2 { - background-position: -673px 0px; + background-position: -673px 0; } .menu-icon-history { - background-position: -695px 0px; + background-position: -695px 0; } .menu-icon-trash { - background-position: -711px 0px; + background-position: -711px 0; } .menu-icon-cut { - background-position: -727px 0px; + background-position: -727px 0; } .menu-icon-select { - background-position: -742px 0px; + background-position: -742px 0; } .enable-daily-search-icon { @@ -394,13 +397,13 @@ inc_bottom.mako .footer { width: 100%; padding: 20px 0; - color: #4e4e4e; + color: rgb(78, 78, 78); text-align: center; font-size: 12px; } .footerhighlight { - color: #111; + color: rgb(17, 17, 17); display: inline; } @@ -410,13 +413,13 @@ inc_rootDirs.mako .rootdir-selectbox, .rootdir-selectbox #rootDirs, .rootdir-controls { - width: 430px + width: 430px; } .rootdir-selectbox { - padding: 0 0 5px + padding: 0 0 5px; } .rootdir-controls { - text-align: center + text-align: center; } /* ======================================================================= @@ -424,10 +427,9 @@ home.mako ========================================================================== */ .imgbanner .banner { - border: 1px solid #ccc; + border: 1px solid rgb(204, 204, 204); overflow: hidden; height: 66px; - overflow: hidden; border-radius: 8px; vertical-align: top; width: 360px; @@ -442,7 +444,7 @@ home.mako border-radius: 3px; vertical-align: middle; width: 45px; - border: 1px solid #ccc; + border: 1px solid rgb(204, 204, 204); margin-right: 5px; } @@ -466,57 +468,57 @@ home.mako height: 100%; overflow: visible; text-align: center; - text-shadow: 0 0 0.1em #fff; + text-shadow: 0 0 0.1em rgb(255, 255, 255); vertical-align: middle; font-size: 12px; - color: #000000; + color: rgb(0, 0, 0); } .progress-100 { - background-image: -moz-linear-gradient(#395f07, #2a4705) !important; - background-image: linear-gradient(#395f07, #2a4705) !important; - background-image: -webkit-linear-gradient(#395f07, #2a4705) !important; - background-image: -o-linear-gradient(#395f07, #2a4705) !important; + background-image: -moz-linear-gradient(rgb(57, 95, 7), rgb(42, 71, 5)) !important; + background-image: linear-gradient(rgb(57, 95, 7), rgb(42, 71, 5)) !important; + background-image: -webkit-linear-gradient(rgb(57, 95, 7), rgb(42, 71, 5)) !important; + background-image: -o-linear-gradient(rgb(57, 95, 7), rgb(42, 71, 5)) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-80 { - background-image: -moz-linear-gradient(#a6cf41, #5b990d) !important; - background-image: linear-gradient(#a6cf41, #5b990d) !important; - background-image: -webkit-linear-gradient(#a6cf41, #5b990d) !important; - background-image: -o-linear-gradient(#a6cf41, #5b990d) !important; + background-image: -moz-linear-gradient(rgb(166, 207, 65), 
rgb(91, 153, 13)) !important; + background-image: linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; + background-image: -webkit-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; + background-image: -o-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-60 { - background-image: -moz-linear-gradient(#fad440, #f2a70d) !important; - background-image: linear-gradient(#fad440, #f2a70d) !important; - background-image: -webkit-linear-gradient(#fad440, #f2a70d) !important; - background-image: -o-linear-gradient(#fad440, #f2a70d) !important; + background-image: -moz-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; + background-image: linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; + background-image: -webkit-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; + background-image: -o-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-40 { - background-image: -moz-linear-gradient(#fab543, #f2700d) !important; - background-image: linear-gradient(#fab543, #f2700d) !important; - background-image: -webkit-linear-gradient(#fab543, #f2700d) !important; - background-image: -o-linear-gradient(#fab543, #f2700d) !important; + background-image: -moz-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; + background-image: linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; + background-image: -webkit-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; + background-image: -o-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-20 { - background-image: -moz-linear-gradient(#da5945, #b11a10) !important; - background-image: linear-gradient(#da5945, #b11a10) !important; - background-image: -webkit-linear-gradient(#da5945, #b11a10) !important; - background-image: -o-linear-gradient(#da5945, #b11a10) !important; + background-image: -moz-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; + background-image: linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; + background-image: -webkit-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; + background-image: -o-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; @@ -545,10 +547,10 @@ span.show-option { .show-container { margin: 4px; - background-color: #F3F3F3; - border: 5px solid #F3F3F3; + background-color: rgb(243, 243, 243); + border: 5px solid rgb(243, 243, 243); overflow: hidden; - box-shadow: 1px 1px 3px 0px rgba(0, 0, 0, 0.31); + box-shadow: 1px 1px 3px 0 rgba(0, 0, 0, 0.31); } .show-details { @@ -567,29 +569,29 @@ span.show-option { } .show-container .ui-corner-all, .ui-corner-bottom, .ui-corner-right, .ui-corner-br { - border-bottom-right-radius: 0px; + border-bottom-right-radius: 0; } .show-container .ui-corner-all, .ui-corner-bottom, .ui-corner-left, .ui-corner-bl { - border-bottom-left-radius: 0px; + border-bottom-left-radius: 0; } .show-container .ui-corner-all, .ui-corner-top, .ui-corner-right, .ui-corner-tr { - border-top-right-radius: 0px; + border-top-right-radius: 0; } .show-container .ui-corner-all, .ui-corner-top, .ui-corner-left, .ui-corner-tl { - border-top-left-radius: 0px; + border-top-left-radius: 0; } .show-container 
.ui-widget-content { - border-top: 1px solid #111; - border-bottom: 1px solid #111; - border-left: 0px; - border-right: 0px; + border-top: 1px solid rgb(17, 17, 17); + border-bottom: 1px solid rgb(17, 17, 17); + border-left: 0; + border-right: 0; } .ui-progressbar .ui-progressbar-value { - height:20px + height:20px; } .ui-progressbar .progress-20 { border: none; @@ -598,8 +600,8 @@ span.show-option { .show-container .progress-40, .show-container .progress-60, .show-container .progress-80 { - border-radius: 0px; - height: 7px + border-radius: 0; + height: 7px; } .show-title { @@ -607,7 +609,7 @@ span.show-option { overflow: hidden; white-space: nowrap; font-size: 11px; - margin: 4px 4px 0px 4px; + margin: 4px 4px 0 4px; } .show-title:after { @@ -630,8 +632,8 @@ span.show-option { overflow: hidden; white-space: nowrap; font-size: 11px; - margin: 0px 4px 4px 4px; - color: #949494; + margin: 0 4px 4px 4px; + color: rgb(148, 148, 148); } .show-date:after { @@ -652,7 +654,7 @@ span.show-option { .show-table { text-align:center; vertical-align:middle; - width: 33% + width: 33%; } .show-add { @@ -704,13 +706,13 @@ span.show-option { } td.tvShow a { - color: #000; + color: rgb(0, 0, 0); text-decoration: none; } td.tvShow a:hover { cursor: pointer; - color: #428BCA; + color: rgb(66, 139, 202); } #popover-target label { @@ -773,23 +775,23 @@ div.buttontext p { } .add-list-icon-addnewshow { - background-position: 0px 0px; + background-position: 0 0; } .add-list-icon-addtrakt { - background-position: -37px 0px; + background-position: -37px 0; } .add-list-icon-addimdb { - background-position: -76px 0px; + background-position: -76px 0; } .add-list-icon-addexistingshow { - background-position: -113px 0px; + background-position: -113px 0; } .add-list-icon-addanime { - background-position: -150px 0px; + background-position: -150px 0; } /* ======================================================================= @@ -815,8 +817,8 @@ home_newShow.mako padding: 8px; overflow: hidden; font-size: 14px; - background-color: #efefef; - border: 1px solid #dfdede; + background-color: rgb(239, 239, 239); + border: 1px solid rgb(223, 222, 222); } #searchResults input[type="radio"] { @@ -842,7 +844,7 @@ ul#rootDirStaticList li { margin: 2px; list-style: none outside none; cursor: pointer; - background: url('../css/lib/images/ui-bg_highlight-soft_75_efefef_1x100.png') repeat-x scroll 50% 50% #EFEFEF; + background: url('../css/lib/images/ui-bg_highlight-soft_75_efefef_1x100.png') repeat-x scroll 50% 50% rgb(239, 239, 239); } ul#rootDirStaticList li label { @@ -885,8 +887,8 @@ home_trendingShows.mako .traktContainer { margin: 12px; width: 188px; - background-color: #DFDACF; - border: 1px solid #111; + background-color: rgb(223, 218, 207); + border: 1px solid rgb(17, 17, 17); border-radius: 6px; } @@ -895,7 +897,7 @@ home_trendingShows.mako width: 186px; border-top-left-radius: 5px; border-top-right-radius: 5px; - border-bottom: 1px solid #111; + border-bottom: 1px solid rgb(17, 17, 17); } /* ======================================================================= @@ -934,7 +936,7 @@ displayShow.mako #topcontrol { -webkit-filter: grayscale(100%); filter: grayscale(100%); - filter: gray; + filter: rgb(128, 128, 128); filter: url("data:image/svg+xml;utf8,#greyscale"); } @@ -944,7 +946,7 @@ h1.title { line-height: 30px; text-align: left; text-rendering: optimizelegibility; - border-bottom: 1px solid #888; + border-bottom: 1px solid rgb(136, 136, 136); } h1.title a { @@ -997,21 +999,21 @@ ul.tags li { margin-right: 4px; 
margin-bottom: 5px; padding: 3px 4px 3px 25px; - background: url(../images/tag.png) no-repeat scroll 5px 4px #555; + background: url(../images/tag.png) no-repeat scroll 5px 4px rgb(85, 85, 85); border-radius: 3px; - border: 1px solid #111; - color: #FFF; + border: 1px solid rgb(17, 17, 17); + color: rgb(255, 255, 255); font: 14px/18px "Open Sans", "Helvetica Neue", Helvetica, Arial, Geneva, sans-serif; - text-shadow: 0px 1px rgba(0, 0, 0, 0.8); + text-shadow: 0 1px rgba(0, 0, 0, 0.8); float: left; } ul.tags li a{ - color: #FFF; + color: rgb(255, 255, 255); } .tvshowImg { - border: 1px solid #ccc; + border: 1px solid rgb(204, 204, 204); border-radius: 5px; height: 311px; width: auto; @@ -1020,8 +1022,8 @@ ul.tags li a{ #summary { padding: 10px; - background-color: #efefef; - border: 1px solid #dfdede; + background-color: rgb(239, 239, 239); + border: 1px solid rgb(223, 222, 222); width: 100%; height: 250px; overflow: auto; @@ -1054,47 +1056,52 @@ ul.tags li a{ } .unaired { - background-color: #f5f1e4; + background-color: rgb(245, 241, 228); } .skipped { - background-color: #bedeed; + background-color: rgb(190, 222, 237); } .good { - background-color: #c3e3c8; + background-color: rgb(195, 227, 200); } .qual { - background-color: #ffda8a; + background-color: rgb(255, 218, 138); } .wanted { - background-color: #ffb0b0; + background-color: rgb(255, 176, 176); } .snatched { - background-color: #ebc1ea; + background-color: rgb(235, 193, 234); +} +.failed { + text-decoration: line-through; + text-decoration-color: red; + background-color: grey; } span.unaired { - color: #584b20; - border: 1px solid #584b20; + color: rgb(88, 75, 32); + border: 1px solid rgb(88, 75, 32); } span.skipped { - color: #1d5068; - border: 1px solid #1d5068; + color: rgb(29, 80, 104); + border: 1px solid rgb(29, 80, 104); } span.good { - color: #295730; - border: 1px solid #295730; + color: rgb(41, 87, 48); + border: 1px solid rgb(41, 87, 48); } span.qual { - color: #765100; - border: 1px solid #765100; + color: rgb(118, 81, 0); + border: 1px solid rgb(118, 81, 0); } span.wanted { - color: #890000; - border: 1px solid #890000; + color: rgb(137, 0, 0); + border: 1px solid rgb(137, 0, 0); } span.snatched { - color: #652164; - border: 1px solid #652164; + color: rgb(101, 33, 100); + border: 1px solid rgb(101, 33, 100); } span.unaired b, @@ -1103,7 +1110,7 @@ span.good b, span.qual b, span.wanted b, span.snatched b { - color: #000000; + color: rgb(0, 0, 0); font-weight: 800; } @@ -1130,17 +1137,17 @@ span.snatched b { text-align: center; border: none; empty-cells: show; - color: #000 !important; + color: rgb(0, 0, 0) !important; } .displayShowTable.display_show { - clear:both + clear:both; } .displayShowTable th.row-seasonheader { border: none !important; - background-color: #222 !important; - color: #fff !important; + background-color: rgb(34, 34, 34) !important; + color: rgb(255, 255, 255) !important; padding-top: 15px !important; text-align: left !important; } @@ -1160,27 +1167,27 @@ span.snatched b { } .sickbeardTable.display_show { - clear:both + clear:both; } .sickbeardTable th{ - color: #fff; + color: rgb(255, 255, 255); text-align: center; - background-color: #333; + background-color: rgb(51, 51, 51); white-space: nowrap; } .sickbeardTable th, .sickbeardTable td { - border-top: 1px solid #fff; - border-left: 1px solid #fff; + border-top: 1px solid rgb(255, 255, 255); + border-left: 1px solid rgb(255, 255, 255); padding: 4px; } th.row-seasonheader { border: none; - background-color: #fff; - color: #000; + 
background-color: rgb(255, 255, 255); + color: rgb(0, 0, 0); padding-top: 15px; text-align: left; } @@ -1190,7 +1197,7 @@ tr.seasonheader { padding-top: 10px; text-align: left; border: none; - color: #fff; + color: rgb(255, 255, 255); } th.col-checkbox, @@ -1320,39 +1327,39 @@ schedule.mako } .listing-default { - background-color: #f5f1e4; + background-color: rgb(245, 241, 228); } .listing-current { - background-color: #dfd; + background-color: rgb(221, 255, 221); } .listing-overdue { - background-color: #fdd; + background-color: rgb(255, 221, 221); } .listing-toofar { - background-color: #bedeed; + background-color: rgb(190, 222, 237); } span.listing-default { - color: #826f30; - border: 1px solid #826f30; + color: rgb(130, 111, 48); + border: 1px solid rgb(130, 111, 48); } span.listing-current { - color: #295730; - border: 1px solid #295730; + color: rgb(41, 87, 48); + border: 1px solid rgb(41, 87, 48); } span.listing-overdue { - color: #890000; - border: 1px solid #890000; + color: rgb(137, 0, 0); + border: 1px solid rgb(137, 0, 0); } span.listing-toofar { - color: #1d5068; - border: 1px solid #1d5068; + color: rgb(29, 80, 104); + border: 1px solid rgb(29, 80, 104); } h2.day, h2.network { @@ -1361,28 +1368,28 @@ h2.day, h2.network { line-height: 36px; font-weight: bold; letter-spacing: 1px; - color: #FFF; + color: rgb(255, 255, 255); text-align: center; - text-shadow: -1px -1px 0px rgba(0, 0, 0, 0.3); - background-color: #333; + text-shadow: -1px -1px 0 rgba(0, 0, 0, 0.3); + background-color: rgb(51, 51, 51); } .tvshowDiv { display: block; clear: both; - border: 1px solid #ccc; + border: 1px solid rgb(204, 204, 204); margin: auto; - padding: 0px; + padding: 0; text-align: left; width: 750px; border-radius: 5px; - background: #fff; + background: rgb(255, 255, 255); cursor: default; overflow: hidden; } .tvshowDiv a:hover { - color: #428BCA; + color: rgb(66, 139, 202); } .tvshowDiv a, .tvshowDiv a:link, .tvshowDiv a:visited, .tvshowDiv a:hover { @@ -1391,11 +1398,11 @@ h2.day, h2.network { } .tvshowTitle a { - color: #000000; + color: rgb(0, 0, 0); float: left; line-height: 1.4em; font-size: 1.4em; - text-shadow: -1px -1px 0 #FFF; + text-shadow: -1px -1px 0 rgb(255, 255, 255); } .tvshowTitleIcons { @@ -1410,28 +1417,28 @@ h2.day, h2.network { .tvshowDiv td.next_episode { width: 100%; height: 90%; - border-bottom: 1px solid #ccc; + border-bottom: 1px solid rgb(204, 204, 204); vertical-align: top; - color: #000; + color: rgb(0, 0, 0); } .bannerThumb { vertical-align: top; height: auto; width: 748px; - border-bottom: 1px solid #ccc; + border-bottom: 1px solid rgb(204, 204, 204); } .posterThumb { vertical-align: top; height: auto; width: 180px; - border-right: 1px solid #ccc; + border-right: 1px solid rgb(204, 204, 204); } .ep_listing { width: auto; - border: 1px solid #ccc; + border: 1px solid rgb(204, 204, 204); margin-bottom: 10px; padding: 10px; } @@ -1454,7 +1461,7 @@ h2.day, h2.network { .calendarWrapper { width:1000px; margin:0 auto; - padding:0 3px + padding:0 3px; } .calendarTable { @@ -1465,20 +1472,20 @@ h2.day, h2.network { } .calendarShow { - padding:0 !important + padding:0 !important; } .calendarShow .poster { - padding-bottom:2px + padding-bottom:2px; } .calendarShow .poster img { width:142px; - height:auto + height:auto; } .calendarShow .text { - padding:0 5px 10px 5px + padding:0 5px 10px 5px; } .calendarShow .text .airtime, @@ -1486,12 +1493,12 @@ h2.day, h2.network { overflow:hidden; text-overflow:ellipsis; display:block; - font-size:11px + font-size:11px; } .calendarShow 
.show-status { padding:5px 10px 10px; - text-align:center + text-align:center; } /* ======================================================================= @@ -1509,12 +1516,12 @@ config*.mako .component-group { padding: 15px 15px 25px; - border-bottom: 1px dotted #ccc; + border-bottom: 1px dotted rgb(204, 204, 204); min-height: 200px; } .component-item { - border-bottom: 1px dotted #666; + border-bottom: 1px dotted rgb(102, 102, 102); min-height: 200px; } @@ -1530,11 +1537,11 @@ config*.mako .component-group-desc p { width: 90%; margin: 10px 0; - color: #666; + color: rgb(102, 102, 102); } #config div.field-pair { - padding: 12px 0px; + padding: 12px 0; } #config div.field-pair select, @@ -1563,11 +1570,11 @@ config*.mako font-weight: normal; display:block; width:475px; - margin-left:182px + margin-left:182px; } #config label.space-right { - margin-right:10px + margin-right:10px; } #config .metadataDiv { @@ -1592,7 +1599,7 @@ select .selected { padding: 5px; margin-bottom: 10px; line-height: 20px; - border: 1px dotted #CCC; + border: 1px dotted rgb(204, 204, 204); } #providerOrderList { @@ -1616,16 +1623,16 @@ select .selected { } #provider_order_list .ui-state-default.torrent-provider { - background-color: #FFFFFF !important; + background-color: rgb(255, 255, 255) !important; } #provider_order_list .ui-state-default.nzb-provider { - background-color: #DDD !important; + background-color: rgb(221, 221, 221) !important; } #provider_order_list input, #service_order_list input { - margin: 0px 2px; + margin: 0 2px; } #config .tip_scale label span.component-title { @@ -1645,7 +1652,7 @@ select .selected { } .infoTableSeperator { - border-top: 1px dotted #666666; + border-top: 1px dotted rgb(102, 102, 102); } .infoTableHeader { @@ -1726,67 +1733,67 @@ select .selected { } .add-client-icon-sabnzbd { - background-position: 0px 0px; + background-position: 0 0; } .add-client-icon-nzbget { - background-position: -34px 0px; + background-position: -34px 0; } .add-client-icon-blackhole { - background-position: -71px 0px; + background-position: -71px 0; } .add-client-icon-deluge { - background-position: -106px 0px; + background-position: -106px 0; } .add-client-icon-deluged { - background-position: -106px 0px; + background-position: -106px 0; } .add-client-icon-qbittorrent { - background-position: -138px 0px; + background-position: -138px 0; } .add-client-icon-rtorrent { - background-position: -172px 0px; + background-position: -172px 0; } .add-client-icon-download-station { - background-position: -205px 0px; + background-position: -205px 0; } .add-client-icon-transmission { - background-position: -241px 0px; + background-position: -241px 0; } .add-client-icon-utorrent { - background-position: -273px 0px; + background-position: -273px 0; } .add-client-icon-spotnet { - background-position: -311px 0px; + background-position: -311px 0; } .add-client-icon-mlnet { - background-position: -344px 0px; + background-position: -344px 0; } .add-client-icon-rss { - background-position: -380px 0px; + background-position: -380px 0; } .add-client-icon-folder { - background-position: -416px 0px; + background-position: -416px 0; } .add-client-icon-ftp { - background-position: -452px 0px; + background-position: -452px 0; } .add-client-icon-irc { - background-position: -488px 0px; + background-position: -488px 0; } /* ======================================================================= @@ -1794,15 +1801,15 @@ config_postProcessing.mako ========================================================================== */ #config 
div.example { - padding: 10px; background-color: #efefef; + padding: 10px; background-color: rgb(239, 239, 239); } .Key { width: 100%; padding: 6px; font-size: 13px; - background-color: #f4f4f4; - border: 1px solid #ccc; + background-color: rgb(244, 244, 244); + border: 1px solid rgb(204, 204, 204); border-collapse: collapse; border-spacing: 0; } @@ -1810,9 +1817,9 @@ config_postProcessing.mako .Key th, .tableHeader { padding: 3px 9px; margin: 0; - color: #fff; + color: rgb(255, 255, 255); text-align: center; - background: none repeat scroll 0 0 #666; + background: none repeat scroll 0 0 rgb(102, 102, 102); } .Key td { @@ -1820,11 +1827,11 @@ config_postProcessing.mako } .Key tr { - border-bottom: 1px solid #ccc; + border-bottom: 1px solid rgb(204, 204, 204); } .Key tr.even { - background-color: #dfdede; + background-color: rgb(223, 222, 222); } .legend { @@ -1849,13 +1856,13 @@ div.metadata_example_wrapper { div.metadata_options { padding: 7px; overflow: auto; - background: #f5f1e4; - border: 1px solid #ccc; + background: rgb(245, 241, 228); + border: 1px solid rgb(204, 204, 204); } div.metadata_options label:hover { - color: #fff; - background-color: #57442b; + color: rgb(255, 255, 255); + background-color: rgb(87, 68, 43); cursor: pointer; } @@ -1863,7 +1870,7 @@ div.metadata_options label { display: block; padding-left: 7px; line-height: 20px; - color: #036; + color: rgb(0, 51, 102); } div.metadata_example { @@ -1873,22 +1880,22 @@ div.metadata_example { div.metadata_example label { display: block; line-height: 21px; - color: #000; + color: rgb(0, 0, 0); cursor: pointer; } div.metadataDiv .disabled { - color: #ccc; + color: rgb(204, 204, 204); } .notifier-icon { float: left; - margin: 6px 4px 0px 0px; + margin: 6px 4px 0 0; } .warning { - border-color: #F89406; - background: url("../images/warning16.png") no-repeat right 5px center #fff; + border-color: rgb(248, 148, 6); + background: url("../images/warning16.png") no-repeat right 5px center rgb(255, 255, 255); } [class^="icon-notifiers-"], [class*=" icon-notifiers-"] { @@ -1902,95 +1909,95 @@ div.metadataDiv .disabled { } .icon-notifiers-kodi { - background-position: 0px 0px; + background-position: 0 0; } .icon-notifiers-plex { - background-position: -35px 0px; + background-position: -35px 0; } .icon-notifiers-plexth { - background-position: -69px 0px; + background-position: -69px 0; } .icon-notifiers-emby { - background-position: -104px 0px; + background-position: -104px 0; } .icon-notifiers-nmj { - background-position: -136px 0px; + background-position: -136px 0; } .icon-notifiers-syno1 { - background-position: -168px 0px; + background-position: -168px 0; } .icon-notifiers-syno2 { - background-position: -202px 0px; + background-position: -202px 0; } .icon-notifiers-pytivo { - background-position: -237px 0px; + background-position: -237px 0; } .icon-notifiers-growl { - background-position: -272px 0px; + background-position: -272px 0; } .icon-notifiers-prowl { - background-position: -308px 0px; + background-position: -308px 0; } .icon-notifiers-libnotify { - background-position: -345px 0px; + background-position: -345px 0; } .icon-notifiers-pushover { - background-position: -377px 0px; + background-position: -377px 0; } .icon-notifiers-boxcar2 { - background-position: -414px 0px; + background-position: -414px 0; } .icon-notifiers-nma { - background-position: -450px 0px; + background-position: -450px 0; } .icon-notifiers-pushalot { - background-position: -486px 0px; + background-position: -486px 0; } .icon-notifiers-pushbullet { - 
background-position: -519px 0px; + background-position: -519px 0; } .icon-notifiers-freemobile { - background-position: -551px 0px; + background-position: -551px 0; } .icon-notifiers-telegram { - background-position: -587px 0px; + background-position: -587px 0; } .icon-notifiers-twitter { - background-position: -624px 0px; + background-position: -624px 0; } .icon-notifiers-trakt { - background-position: -659px 0px; + background-position: -659px 0; } .icon-notifiers-email { - background-position: -695px 0px; + background-position: -695px 0; } .icon-notifiers-anime { - background-position: -733px 0px; + background-position: -733px 0; } .icon-notifiers-look { - background-position: -769px 0px; + background-position: -769px 0; } /* ======================================================================= @@ -2022,7 +2029,7 @@ td.tableright { text-align: left; vertical-align: middle; width: 225px; - padding: 6px 0px; + padding: 6px 0; } .optionWrapper div.selectChoices { @@ -2043,11 +2050,11 @@ td.tableright { .separator { font-size: 90%; - color: #333333; + color: rgb(51, 51, 51); } a.whitelink { - color: #fff; + color: rgb(255, 255, 255); } /* ======================================================================= @@ -2056,8 +2063,8 @@ Global span.path { padding: 3px 6px; - color: #8b0000; - background-color: #f5f1e4; + color: rgb(139, 0, 0); + background-color: rgb(245, 241, 228); } .align-left { @@ -2077,8 +2084,8 @@ span.quality { background-image:linear-gradient(to bottom, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 50%,rgba(0,0,0,0.25)); -webkit-box-shadow:inset 0 1px rgba(255,255,255,0.1),inset 0 -1px 3px rgba(0,0,0,0.3),inset 0 0 0 1px rgba(255,255,255,0.08),0 1px 2px rgba(0,0,0,0.15); box-shadow:inset 0 1px rgba(255,255,255,0.1),inset 0 -1px 3px rgba(0,0,0,0.3),inset 0 0 0 1px rgba(255,255,255,0.08),0 1px 2px rgba(0,0,0,0.15); - text-shadow: 0px 1px rgba(0, 0, 0, 0.8); - color: #FFFFFF; + text-shadow: 0 1px rgba(0, 0, 0, 0.8); + color: rgb(255, 255, 255); display: inline-block; padding: 2px 4px; text-align: center; @@ -2090,89 +2097,108 @@ span.quality { } span.any-hd { - background-color: #2672b6; + background-color: rgb(38, 114, 182); background: repeating-linear-gradient( -45deg, - #2672b6, - #2672b6 10px, - #5b990d 10px, - #5b990d 20px + rgb(38, 114, 182), + rgb(38, 114, 182) 10px, + rgb(91, 153, 13) 10px, + rgb(91, 153, 13) 20px ); } span.Custom { - background-color: #621993; + background-color: rgb(98, 25, 147); } span.HD { - background-color: #2672B6; + background-color: rgb(38, 114, 182); } span.HDTV { - background-color: #2672B6; + background-color: rgb(38, 114, 182); } span.HD720p { - background-color: #5b990d; + background-color: rgb(91, 153, 13); } span.HD1080p { - background-color: #2672B6; + background-color: rgb(38, 114, 182); } span.UHD-4K { - background-color: #7500FF; + background-color: rgb(117, 0, 255); } span.UHD-8K { - background-color: #410077; + background-color: rgb(65, 0, 119); } span.RawHD { - background-color: #cd7300; + background-color: rgb(205, 115, 0); } span.RawHDTV { - background-color: #cd7300; + background-color: rgb(205, 115, 0); } span.SD { - background-color: #BE2625; + background-color: rgb(190, 38, 37); } span.SDTV { - background-color: #BE2625; + background-color: rgb(190, 38, 37); } span.SDDVD { - background-color: #BE2625; + background-color: rgb(190, 38, 37); } span.Any { - background-color: #666; + background-color: rgb(102, 102, 102); } span.Unknown { - background-color: #999; + background-color: rgb(153, 153, 153); } 
span.Proper { - background-color: #3F7F00; + background-color: rgb(63, 127, 0); } span.false { - color: #993333; + color: rgb(153, 51, 51); /* red */ } span.true { - color: #669966; + color: rgb(102, 153, 102); /* green */ } span.break-word { word-break: break-all; } +span.required { + color: green; +} +span.preferred { + color: blue; +} +span.undesired { + color: orange; +} +span.ignored { + color: red; +} + +a.wiki { + color: red; +} +a.wiki strong{ + color: red; +} option.flag { padding-left: 35px; @@ -2212,7 +2238,7 @@ div.blackwhitelist{ } div.blackwhitelist input { - margin: 5px 0px; + margin: 5px 0; } div.blackwhitelist.pool select{ @@ -2229,7 +2255,7 @@ div.blackwhitelist span { } div.blackwhitelist.anidb, div.blackwhitelist.manual { - margin: 7px 0px; + margin: 7px 0; } /* ======================================================================= @@ -2240,7 +2266,7 @@ body { padding-top: 60px; overflow-y: scroll; font-family: "Open Sans", "Helvetica Neue", Helvetica, Arial, sans-serif; - color: #000; + color: rgb(0, 0, 0); } html * { @@ -2248,18 +2274,18 @@ html * { } input[type="checkbox"] { - margin: 2px 0px 0px; + margin: 2px 0 0; line-height: normal; } input[type="radio"] { - margin: 2px 0px 0px; + margin: 2px 0 0; line-height: normal; } input, textarea, select, .uneditable-input { width: auto; - color: #000; + color: rgb(0, 0, 0); } .container-fluid { @@ -2268,65 +2294,65 @@ input, textarea, select, .uneditable-input { } .navbar-brand { - padding: 0px; + padding: 0; } /* navbar styling */ .navbar-default .navbar-brand { - color: #ffffff; + color: rgb(255, 255, 255); } .navbar-default .navbar-brand:hover, .navbar-default .navbar-brand:focus { - color: #ffffff; + color: rgb(255, 255, 255); background-color: transparent; } .navbar-default .navbar-text { - color: #dddddd; + color: rgb(221, 221, 221); } .navbar-default .navbar-nav > li > a { - color: #dddddd; + color: rgb(221, 221, 221); } .navbar-default .navbar-nav > li > a:hover, .navbar-default .navbar-nav > li > a:focus { - color: #ffffff; + color: rgb(255, 255, 255); } .navbar-default .navbar-nav > .active > a, .navbar-default .navbar-nav > .active > a:hover, .navbar-default .navbar-nav > .active > a:focus { - color: #ffffff; - background-color: #333333; + color: rgb(255, 255, 255); + background-color: rgb(51, 51, 51); } .navbar-default .navbar-nav > .disabled > a, .navbar-default .navbar-nav > .disabled > a:hover, .navbar-default .navbar-nav > .disabled > a:focus { - color: #cccccc; + color: rgb(204, 204, 204); background-color: transparent; } .navbar-default .navbar-toggle { - border-color: #cccccc; + border-color: rgb(204, 204, 204); } .navbar-default .navbar-toggle:hover, .navbar-default .navbar-toggle:focus { - background-color: #333333; + background-color: rgb(51, 51, 51); } .navbar-default .navbar-toggle .icon-bar { - background-color: #333333; + background-color: rgb(51, 51, 51); } .navbar-default .navbar-nav > .open > a, .navbar-default .navbar-nav > .open > a:hover, .navbar-default .navbar-nav > .open > a:focus { - background-color: #333333; - color: #ffffff; + background-color: rgb(51, 51, 51); + color: rgb(255, 255, 255); } .navbar-default .navbar-nav > li.navbar-split > a { @@ -2344,55 +2370,55 @@ input, textarea, select, .uneditable-input { @media (max-width: 767px) { .navbar-default .navbar-nav .open .dropdown-menu > li > a { - color: #dddddd; + color: rgb(221, 221, 221); } .navbar-default .navbar-nav .open .dropdown-menu > li > a:hover, .navbar-default .navbar-nav .open .dropdown-menu > li > a:focus { - color: 
#ffffff; + color: rgb(255, 255, 255); background-color: transparent; } .navbar-default .navbar-nav .open .dropdown-menu > .active > a, .navbar-default .navbar-nav .open .dropdown-menu > .active > a:hover, .navbar-default .navbar-nav .open .dropdown-menu > .active > a:focus { - color: #ffffff; - background-color: #333333; + color: rgb(255, 255, 255); + background-color: rgb(51, 51, 51); } .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a, .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a:hover, .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a:focus { - color: #cccccc; + color: rgb(204, 204, 204); background-color: transparent; } } .navbar-default .navbar-link { - color: #dddddd; + color: rgb(221, 221, 221); } .navbar-default .navbar-link:hover { - color: #ffffff; + color: rgb(255, 255, 255); } .navbar-default .btn-link { - color: #98978b; + color: rgb(152, 151, 139); } .navbar-default .btn-link:hover, .navbar-default .btn-link:focus { - color: #dddddd; + color: rgb(221, 221, 221); } .navbar-default .btn-link[disabled]:hover, fieldset[disabled] .navbar-default .btn-link:hover, .navbar-default .btn-link[disabled]:focus, fieldset[disabled] .navbar-default .btn-link:focus { - color: #cccccc; + color: rgb(204, 204, 204); } .dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus { - color: #262626; + color: rgb(38, 38, 38); text-decoration: none; - background-color: #F5F5F5; + background-color: rgb(245, 245, 245); } .dropdown-menu > li > a { @@ -2400,13 +2426,13 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .dropdown-menu { - background-color: #F5F1E4; + background-color: rgb(245, 241, 228); border: 1px solid rgba(0, 0, 0, 0.15); - box-shadow: 0px 6px 12px rgba(0, 0, 0, 0.176); + box-shadow: 0 6px 12px rgba(0, 0, 0, 0.176); } .form-control { - color: #000000; + color: rgb(0, 0, 0); } .form-control-inline { @@ -2424,25 +2450,25 @@ fieldset[disabled] .navbar-default .btn-link:focus { font-size: 12px; line-height: 16px; *line-height: 20px; - color: #333333; + color: rgb(51, 51, 51); text-align: center; text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75); vertical-align: middle; cursor: pointer; - background-color: #f5f5f5; - *background-color: #e6e6e6; - background-image: -ms-linear-gradient(top, #ffffff, #e6e6e6); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), to(#e6e6e6)); - background-image: -webkit-linear-gradient(top, #ffffff, #e6e6e6); - background-image: -o-linear-gradient(top, #ffffff, #e6e6e6); - background-image: linear-gradient(top, #ffffff, #e6e6e6); - background-image: -moz-linear-gradient(top, #ffffff, #e6e6e6); + background-color: rgb(245, 245, 245); + *background-color: rgb(230, 230, 230); + background-image: -ms-linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(255, 255, 255)), to(rgb(230, 230, 230))); + background-image: -webkit-linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); + background-image: -o-linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); + background-image: linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); + background-image: -moz-linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); background-repeat: repeat-x; - border: 1px solid #cccccc; + border: 1px solid rgb(204, 204, 204); *border: 0; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); - border-color: #e6e6e6 #e6e6e6 #bfbfbf; - border-bottom-color: #b3b3b3; + border-color: rgb(230, 230, 
230) rgb(230, 230, 230) rgb(191, 191, 191); + border-bottom-color: rgb(179, 179, 179); -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; @@ -2459,13 +2485,13 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn.active, .btn.disabled, .btn[disabled] { - background-color: #e6e6e6; - *background-color: #d9d9d9; + background-color: rgb(230, 230, 230); + *background-color: rgb(217, 217, 217); } .btn:active, .btn.active { - background-color: #cccccc \9; + background-color: rgb(204, 204, 204) \9; } .btn:first-child { @@ -2473,10 +2499,10 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .btn:hover { - color: #333333; + color: rgb(51, 51, 51); text-decoration: none; - background-color: #e6e6e6; - *background-color: #d9d9d9; + background-color: rgb(230, 230, 230); + *background-color: rgb(217, 217, 217); background-position: 0 -15px; -webkit-transition: background-position 0.1s linear; -moz-transition: background-position 0.1s linear; @@ -2486,15 +2512,15 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .btn:focus { - outline: thin dotted #333; + outline: thin dotted rgb(51, 51, 51); outline: 5px auto -webkit-focus-ring-color; outline-offset: -2px; } .btn.active, .btn:active { - background-color: #e6e6e6; - background-color: #d9d9d9 \9; + background-color: rgb(230, 230, 230); + background-color: rgb(217, 217, 217) \9; background-image: none; outline: 0; -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); @@ -2505,7 +2531,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn.disabled, .btn[disabled] { cursor: default; - background-color: #e6e6e6; + background-color: rgb(230, 230, 230); background-image: none; opacity: 0.65; filter: alpha(opacity=65); @@ -2555,7 +2581,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-info:hover, .btn-inverse, .btn-inverse:hover { - color: #ffffff; + color: rgb(255, 255, 255); text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); } @@ -2569,16 +2595,16 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .btn-primary { - background-color: #0074cc; - *background-color: #0055cc; - background-image: -ms-linear-gradient(top, #0088cc, #0055cc); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0055cc)); - background-image: -webkit-linear-gradient(top, #0088cc, #0055cc); - background-image: -o-linear-gradient(top, #0088cc, #0055cc); - background-image: -moz-linear-gradient(top, #0088cc, #0055cc); - background-image: linear-gradient(top, #0088cc, #0055cc); + background-color: rgb(0, 116, 204); + *background-color: rgb(0, 85, 204); + background-image: -ms-linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(0, 136, 204)), to(rgb(0, 85, 204))); + background-image: -webkit-linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); + background-image: -o-linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); + background-image: -moz-linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); + background-image: linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); background-repeat: repeat-x; - border-color: #0055cc #0055cc #003580; + border-color: rgb(0, 85, 204) rgb(0, 85, 204) rgb(0, 53, 128); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#0088cc', endColorstr='#0055cc', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2589,26 +2615,26 @@ 
fieldset[disabled] .navbar-default .btn-link:focus { .btn-primary.active, .btn-primary.disabled, .btn-primary[disabled] { - background-color: #0055cc; - *background-color: #004ab3; + background-color: rgb(0, 85, 204); + *background-color: rgb(0, 74, 179); } .btn-primary:active, .btn-primary.active { - background-color: #004099 \9; + background-color: rgb(0, 64, 153) \9; } .btn-warning { - background-color: #faa732; - *background-color: #f89406; - background-image: -ms-linear-gradient(top, #fbb450, #f89406); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#fbb450), to(#f89406)); - background-image: -webkit-linear-gradient(top, #fbb450, #f89406); - background-image: -o-linear-gradient(top, #fbb450, #f89406); - background-image: -moz-linear-gradient(top, #fbb450, #f89406); - background-image: linear-gradient(top, #fbb450, #f89406); + background-color: rgb(250, 167, 50); + *background-color: rgb(248, 148, 6); + background-image: -ms-linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(251, 180, 80)), to(rgb(248, 148, 6))); + background-image: -webkit-linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); + background-image: -o-linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); + background-image: -moz-linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); + background-image: linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); background-repeat: repeat-x; - border-color: #f89406 #f89406 #ad6704; + border-color: rgb(248, 148, 6) rgb(248, 148, 6) rgb(173, 103, 4); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#fbb450', endColorstr='#f89406', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2619,26 +2645,26 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-warning.active, .btn-warning.disabled, .btn-warning[disabled] { - background-color: #f89406; - *background-color: #df8505; + background-color: rgb(248, 148, 6); + *background-color: rgb(223, 133, 5); } .btn-warning:active, .btn-warning.active { - background-color: #c67605 \9; + background-color: rgb(198, 118, 5) \9; } .btn-danger { - background-color: #da4f49; - *background-color: #bd362f; - background-image: -ms-linear-gradient(top, #ee5f5b, #bd362f); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ee5f5b), to(#bd362f)); - background-image: -webkit-linear-gradient(top, #ee5f5b, #bd362f); - background-image: -o-linear-gradient(top, #ee5f5b, #bd362f); - background-image: -moz-linear-gradient(top, #ee5f5b, #bd362f); - background-image: linear-gradient(top, #ee5f5b, #bd362f); + background-color: rgb(218, 79, 73); + *background-color: rgb(189, 54, 47); + background-image: -ms-linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(238, 95, 91)), to(rgb(189, 54, 47))); + background-image: -webkit-linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); + background-image: -o-linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); + background-image: -moz-linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); + background-image: linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); background-repeat: repeat-x; - border-color: #bd362f #bd362f #802420; + border-color: rgb(189, 54, 47) rgb(189, 54, 47) rgb(128, 36, 32); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: 
progid:dximagetransform.microsoft.gradient(startColorstr='#ee5f5b', endColorstr='#bd362f', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2649,26 +2675,26 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-danger.active, .btn-danger.disabled, .btn-danger[disabled] { - background-color: #bd362f; - *background-color: #a9302a; + background-color: rgb(189, 54, 47); + *background-color: rgb(169, 48, 42); } .btn-danger:active, .btn-danger.active { - background-color: #942a25 \9; + background-color: rgb(148, 42, 37) \9; } .btn-success { - background-color: #5bb75b; - *background-color: #51a351; - background-image: -ms-linear-gradient(top, #62c462, #51a351); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#62c462), to(#51a351)); - background-image: -webkit-linear-gradient(top, #62c462, #51a351); - background-image: -o-linear-gradient(top, #62c462, #51a351); - background-image: -moz-linear-gradient(top, #62c462, #51a351); - background-image: linear-gradient(top, #62c462, #51a351); + background-color: rgb(91, 183, 91); + *background-color: rgb(81, 163, 81); + background-image: -ms-linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(98, 196, 98)), to(rgb(81, 163, 81))); + background-image: -webkit-linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); + background-image: -o-linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); + background-image: -moz-linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); + background-image: linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); background-repeat: repeat-x; - border-color: #51a351 #51a351 #387038; + border-color: rgb(81, 163, 81) rgb(81, 163, 81) rgb(56, 112, 56); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#62c462', endColorstr='#51a351', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2679,26 +2705,26 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-success.active, .btn-success.disabled, .btn-success[disabled] { - background-color: #51a351; - *background-color: #499249; + background-color: rgb(81, 163, 81); + *background-color: rgb(73, 146, 73); } .btn-success:active, .btn-success.active { - background-color: #408140 \9; + background-color: rgb(64, 129, 64) \9; } .btn-info { - background-color: #49afcd; - *background-color: #2f96b4; - background-image: -ms-linear-gradient(top, #5bc0de, #2f96b4); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#5bc0de), to(#2f96b4)); - background-image: -webkit-linear-gradient(top, #5bc0de, #2f96b4); - background-image: -o-linear-gradient(top, #5bc0de, #2f96b4); - background-image: -moz-linear-gradient(top, #5bc0de, #2f96b4); - background-image: linear-gradient(top, #5bc0de, #2f96b4); + background-color: rgb(73, 175, 205); + *background-color: rgb(47, 150, 180); + background-image: -ms-linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(91, 192, 222)), to(rgb(47, 150, 180))); + background-image: -webkit-linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); + background-image: -o-linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); + background-image: -moz-linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); + background-image: linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); background-repeat: repeat-x; 
- border-color: #2f96b4 #2f96b4 #1f6377; + border-color: rgb(47, 150, 180) rgb(47, 150, 180) rgb(31, 99, 119); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#5bc0de', endColorstr='#2f96b4', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2709,26 +2735,26 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-info.active, .btn-info.disabled, .btn-info[disabled] { - background-color: #2f96b4; - *background-color: #2a85a0; + background-color: rgb(47, 150, 180); + *background-color: rgb(42, 133, 160); } .btn-info:active, .btn-info.active { - background-color: #24748c \9; + background-color: rgb(36, 116, 140) \9; } .btn-inverse { - background-color: #414141; - *background-color: #222222; - background-image: -ms-linear-gradient(top, #555555, #222222); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#555555), to(#222222)); - background-image: -webkit-linear-gradient(top, #555555, #222222); - background-image: -o-linear-gradient(top, #555555, #222222); - background-image: -moz-linear-gradient(top, #555555, #222222); - background-image: linear-gradient(top, #555555, #222222); + background-color: rgb(65, 65, 65); + *background-color: rgb(34, 34, 34); + background-image: -ms-linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(85, 85, 85)), to(rgb(34, 34, 34))); + background-image: -webkit-linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); + background-image: -o-linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); + background-image: -moz-linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); + background-image: linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); background-repeat: repeat-x; - border-color: #222222 #222222 #000000; + border-color: rgb(34, 34, 34) rgb(34, 34, 34) rgb(0, 0, 0); border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#555555', endColorstr='#222222', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2739,13 +2765,13 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-inverse.active, .btn-inverse.disabled, .btn-inverse[disabled] { - background-color: #222222; - *background-color: #151515; + background-color: rgb(34, 34, 34); + *background-color: rgb(21, 21, 21); } .btn-inverse:active, .btn-inverse.active { - background-color: #080808 \9; + background-color: rgb(8, 8, 8) \9; } .btn-xs { @@ -2762,7 +2788,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { right: 12px; display: inline-block; border-right: 6px solid transparent; - border-bottom: 6px solid #F5F1E4; + border-bottom: 6px solid rgb(245, 241, 228); border-left: 6px solid transparent; content: ""; } @@ -2843,8 +2869,8 @@ fieldset.sectionwrap { } legend.legendStep { - color: #57442b; - margin-bottom: 0px; + color: rgb(87, 68, 43); + margin-bottom: 0; } div.stepsguide { @@ -2862,15 +2888,15 @@ div.stepsguide .step { div.stepsguide .step p { margin: 12px 0; - border-bottom: 4px solid #57442b; + border-bottom: 4px solid rgb(87, 68, 43); } div.stepsguide .disabledstep { - color: #c4c4c4; + color: rgb(196, 196, 196); } div.stepsguide .disabledstep p { - border-bottom: 4px solid #8a775e; + border-bottom: 4px solid rgb(138, 119, 94); } div.stepsguide .step .smalltext { @@ -2888,10 +2914,10 @@ div.formpaginate { div.formpaginate .prev, div.formpaginate .next 
{ padding: 3px 6px; - color: #fff; + color: rgb(255, 255, 255); cursor: hand; cursor: pointer; - background: #57442b; + background: rgb(87, 68, 43); -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; @@ -2949,7 +2975,7 @@ div.field-pair input { width: 85%; margin: .8em 0; font-size: 1.13em; - color: #666; + color: rgb(102, 102, 102); } /* ======================================================================= @@ -2960,17 +2986,17 @@ tablesorter.css width: 100%; margin-right: auto; margin-left: auto; - color: #000; + color: rgb(0, 0, 0); text-align: left; - background-color: #fff; + background-color: rgb(255, 255, 255); border-spacing: 0; } .tablesorter th, .tablesorter td { padding: 4px; - border-top: #fff 1px solid; - border-left: #fff 1px solid; + border-top: rgb(255, 255, 255) 1px solid; + border-left: rgb(255, 255, 255) 1px solid; vertical-align: middle; } @@ -2981,10 +3007,10 @@ tablesorter.css } .tablesorter th { - color: #fff; + color: rgb(255, 255, 255); text-align: center; text-shadow: -1px -1px 0 rgba(0,0,0,0.3); - background-color: #333; + background-color: rgb(51, 51, 51); border-collapse: collapse; font-weight: normal; } @@ -2999,13 +3025,13 @@ tablesorter.css } .tablesorter thead .tablesorter-headerDesc { - background-color: #555; + background-color: rgb(85, 85, 85); background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAAP///////yH5BAEAAAEALAAAAAAVAAQAAAINjB+gC+jP2ptn0WskLQA7); /* background-image: url(../images/tablesorter/asc.gif); */ } .tablesorter thead .tablesorter-headerAsc { - background-color: #555; + background-color: rgb(85, 85, 85); background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAAP///////yH5BAEAAAEALAAAAAAVAAQAAAINjI8Bya2wnINUMopZAQA7); /* background-image: url(../images/tablesorter/desc.gif); */ } @@ -3017,17 +3043,17 @@ tablesorter.css } thead.tablesorter-stickyHeader { - border-top: 2px solid #fff; - border-bottom: 2px solid #fff; + border-top: 2px solid rgb(255, 255, 255); + border-bottom: 2px solid rgb(255, 255, 255); } /* Zebra Widget - row alternating colors */ .tablesorter tr.odd, .sickbeardTable tr.odd { - background-color: #f5f1e4; + background-color: rgb(245, 241, 228); } .tablesorter tr.even, .sickbeardTable tr.even { - background-color: #dfdacf; + background-color: rgb(223, 218, 207); } /* filter widget */ @@ -3046,8 +3072,8 @@ thead.tablesorter-stickyHeader { .tablesorter tr.tablesorter-filter-row, .tablesorter tr.tablesorter-filter-row td { text-align: center; - background: #eee; - border-bottom: 1px solid #ddd; + background: rgb(238, 238, 238); + border-bottom: 1px solid rgb(221, 221, 221); } /* optional disabled input styling */ @@ -3061,15 +3087,15 @@ thead.tablesorter-stickyHeader { } .tablesorter tfoot tr { - color: #fff; + color: rgb(255, 255, 255); text-align: center; text-shadow: -1px -1px 0 rgba(0,0,0,0.3); - background-color: #333; + background-color: rgb(51, 51, 51); border-collapse: collapse; } .tablesorter tfoot a { - color:#fff; + color: rgb(255, 255, 255); text-decoration: none; } @@ -3082,14 +3108,14 @@ ul.token-input-list { height: auto !important; height: 1%; width: 273px; - border: 1px solid #ccc; + border: 1px solid rgb(204, 204, 204); cursor: text; font-size: 10px; - font-family: Verdana; + font-family: Verdana, sans-serif; z-index: 999; margin: 0; padding: 0 0 1px 0; - background-color: #fff; + background-color: rgb(255, 255, 255); list-style-type: none; /* clear: left; */ border-top-left-radius: 3px; @@ -3105,7 +3131,7 @@ ul.token-input-list li { ul.token-input-list li input { border: 0; 
padding: 3px 4px; - background-color: white; + background-color: rgb(255, 255, 255); /* -webkit-appearance: caret; */ } @@ -3115,8 +3141,8 @@ li.token-input-token { height: 1%; margin: 3px; padding: 3px 5px 0 5px; - background-color: #d0efa0; - color: #000; + background-color: rgb(208, 239, 160); + color: rgb(0, 0, 0); font-weight: bold; cursor: default; display: block; @@ -3142,17 +3168,17 @@ li.token-input-token p { li.token-input-token span { float: right; - color: #777; + color: rgb(119, 119, 119); cursor: pointer; } li.token-input-selected-token { - background-color: #08844e; - color: #fff; + background-color: rgb(8, 132, 78); + color: rgb(255, 255, 255); } li.token-input-selected-token span { - color: #bbb; + color: rgb(187, 187, 187); } li.token-input-input-token input { @@ -3168,7 +3194,7 @@ div.token-input-dropdown { border-bottom: 1px solid; cursor: default; font-size: 11px; - font-family: Verdana; + font-family: Verdana, sans-serif; z-index: 1; } @@ -3176,7 +3202,7 @@ div.token-input-dropdown p { margin: 0; padding: 3px; font-weight: bold; - color: #777; + color: rgb(119, 119, 119); } div.token-input-dropdown ul { @@ -3185,17 +3211,17 @@ div.token-input-dropdown ul { } div.token-input-dropdown ul li { - background-color: #fff; + background-color: rgb(255, 255, 255); padding: 3px; list-style-type: none; } div.token-input-dropdown ul li.token-input-dropdown-item { - background-color: #fafafa; + background-color: rgb(250, 250, 250); } div.token-input-dropdown ul li.token-input-dropdown-item2 { - background-color: #fff; + background-color: rgb(255, 255, 255); } div.token-input-dropdown ul li em { @@ -3204,21 +3230,24 @@ div.token-input-dropdown ul li em { } div.token-input-dropdown ul li.token-input-selected-dropdown-item { - background-color: #6196c2; + background-color: rgb(97, 150, 194); } span.token-input-delete-token { margin: 0 1px; } -.red-text {color:#d33} -.clear-left {clear:left} -.nextline-block {display:block} +.red-text {color: rgb(221, 51, 51); +} +.clear-left {clear:left; +} +.nextline-block {display:block; +} .trakt-image { display: block; z-index: 0; - background-image: url(/images/poster-dark.jpg) + background-image: url(/images/poster-dark.jpg); } /* ======================================================================= @@ -3238,14 +3267,14 @@ jquery.confirm.css } #confirmBox{ - background: #F5F1E4; + background: rgb(245, 241, 228); width: 460px; position: fixed; left: 50%; top: 50%; margin: -130px 0 0 -230px; - border: 1px solid #111; - box-shadow: 0px 0px 12px 0px rgba(0, 0, 0, 0.175); + border: 1px solid rgb(17, 17, 17); + box-shadow: 0 0 12px 0 rgba(0, 0, 0, 0.175); } #confirmBox h1, @@ -3254,18 +3283,18 @@ jquery.confirm.css } #confirmBox h1 { - background-color: #333; - border-bottom: 1px solid #111; - color: #fff; + background-color: rgb(51, 51, 51); + border-bottom: 1px solid rgb(17, 17, 17); + color: rgb(255, 255, 255); margin: 0; font-size: 22px; - text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); + text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); } #confirmBox p { padding-top: 20px; - color: #000; - text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.75); + color: rgb(0, 0, 0); + text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75); } #confirmButtons { @@ -3278,11 +3307,11 @@ jquery.confirm.css padding: 2px 20px; text-decoration: none; display: inline-block; - color: #fff; + color: rgb(255, 255, 255); text-align:center; - text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); + text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); background-clip: padding-box; - border: 1px solid #111; + 
border: 1px solid rgb(17, 17, 17); border-radius: 3px; cursor: pointer; -webkit-box-sizing: border-box; @@ -3301,19 +3330,19 @@ jquery.confirm.css } #confirmBox .green { - background-color: #3F7636; + background-color: rgb(63, 118, 54); } #confirmBox .green:hover { - background-color: #48873E; + background-color: rgb(72, 135, 62); } #confirmBox .red { - background-color: #8D2D2B; + background-color: rgb(141, 45, 43); } #confirmBox .red:hover { - background-color: #A13331; + background-color: rgb(161, 51, 49); } /* ======================================================================= @@ -3340,7 +3369,7 @@ login.css .login h1 { padding: 0 0 10px; font-size: 60px; - font-family: Lobster; + font-family: Lobster, cursive; font-weight: normal; } @@ -3408,8 +3437,8 @@ IMDB Popular } .popularShow h3{ - padding:0px; - margin:0px; + padding:0; + margin:0; display:inline-block; margin-right:30px; } @@ -3438,11 +3467,11 @@ IMDB Popular .popularShow .rating{ font-size:90%; display:inline-block; - margin-left:0px; + margin-left:0; } .popularShow p{ - margin-bottom:0px; + margin-bottom:0; } table.home-header { @@ -3499,4 +3528,4 @@ viewLog.mako to configure the same for the columns which are used for the filter inputs.*/ .log-filter { margin-top: 20px; -} \ No newline at end of file +} diff --git a/gui/slick/js/addShowOptions.js b/gui/slick/js/addShowOptions.js index 223bf051e5..5bc5066199 100644 --- a/gui/slick/js/addShowOptions.js +++ b/gui/slick/js/addShowOptions.js @@ -2,10 +2,10 @@ $(document).ready(function () { $('#saveDefaultsButton').click(function () { var anyQualArray = []; var bestQualArray = []; - $('#anyQualities option:selected').each(function (i, d) { + $('#allowed_qualities option:selected').each(function (i, d) { anyQualArray.push($(d).val()); }); - $('#bestQualities option:selected').each(function (i, d) { + $('#preferred_qualities option:selected').each(function (i, d) { bestQualArray.push($(d).val()); }); @@ -28,7 +28,7 @@ $(document).ready(function () { }); }); - $('#statusSelect, #qualityPreset, #flatten_folders, #anyQualities, #bestQualities, #subtitles, #scene, #anime, #statusSelectAfter').change(function () { + $('#statusSelect, #qualityPreset, #flatten_folders, #allowed_qualities, #preferred_qualities, #subtitles, #scene, #anime, #statusSelectAfter').change(function () { $('#saveDefaultsButton').attr('disabled', false); }); diff --git a/gui/slick/js/qualityChooser.js b/gui/slick/js/qualityChooser.js index 81439f94f2..e4d25d68f8 100644 --- a/gui/slick/js/qualityChooser.js +++ b/gui/slick/js/qualityChooser.js @@ -7,7 +7,7 @@ $(document).ready(function() { $('#customQuality').hide(); } - $('#anyQualities option').each(function() { + $('#allowed_qualities option').each(function() { var result = preset & $(this).val(); // jshint ignore:line if (result > 0) { $(this).attr('selected', 'selected'); @@ -16,7 +16,7 @@ $(document).ready(function() { } }); - $('#bestQualities option').each(function() { + $('#preferred_qualities option').each(function() { var result = preset & ($(this).val() << 16); // jshint ignore:line if (result > 0) { $(this).attr('selected', 'selected'); diff --git a/gui/slick/views/500.mako b/gui/slick/views/500.mako index 4a8547d4ac..1d58216097 100644 --- a/gui/slick/views/500.mako +++ b/gui/slick/views/500.mako @@ -1,16 +1,15 @@ <%inherit file="/layouts/main.mako"/> - <%block name="content">

      ${header}

      -A mako error has occured.
      -If this happened during an update a simple page refresh may be the solution.
      -Mako errors that happen during updates may be a one time error if there were significant ui changes.
+A mako error has occurred.
+If this happened during an update, a simple page refresh may be the solution.
+Mako errors that happen during updates may be a one-time error if there were significant UI changes.


      Show/Hide Error
      -
      +
       <% filename, lineno, function, line = backtrace.traceback[-1] %>
      diff --git a/gui/slick/views/IRC.mako b/gui/slick/views/IRC.mako
      index 534bb432fd..86d94a0d0e 100644
      --- a/gui/slick/views/IRC.mako
      +++ b/gui/slick/views/IRC.mako
      @@ -4,5 +4,5 @@
       from sickbeard import GIT_USERNAME
       username = ("MedusaUI|?", GIT_USERNAME)[bool(GIT_USERNAME)]
       %>
      -
      +
       
      diff --git a/gui/slick/views/addShows.mako b/gui/slick/views/addShows.mako
      index bd00ab60ad..258beb0a67 100644
      --- a/gui/slick/views/addShows.mako
      +++ b/gui/slick/views/addShows.mako
      @@ -10,44 +10,37 @@
       % else:
           

      ${title}

      % endif - diff --git a/gui/slick/views/addShows_addExistingShow.mako b/gui/slick/views/addShows_addExistingShow.mako index 55621a5e89..a89cb43904 100644 --- a/gui/slick/views/addShows_addExistingShow.mako +++ b/gui/slick/views/addShows_addExistingShow.mako @@ -3,8 +3,8 @@ import sickbeard %> <%block name="scripts"> - - + + <%block name="content"> % if not header is UNDEFINED: @@ -12,13 +12,11 @@ % else:

      ${title}

      % endif -
      -
      -
      +
      -
      - +

Medusa can add existing shows using the current options and locally stored NFO/XML metadata, eliminating user interaction. If you would rather have Medusa prompt you to customize each show, use the checkbox below.

      -

      -
      -

      Displaying folders within these directories which aren't already added to Medusa:

      -
      -
      +
      -
      -
      +
      +
      diff --git a/gui/slick/views/addShows_newShow.mako b/gui/slick/views/addShows_newShow.mako index 5d388a4030..3e4e9c4678 100644 --- a/gui/slick/views/addShows_newShow.mako +++ b/gui/slick/views/addShows_newShow.mako @@ -4,10 +4,10 @@ from sickbeard.helpers import anon_url %> <%block name="scripts"> - - - - + + + + <%block name="content"> % if not header is UNDEFINED: @@ -15,23 +15,19 @@ % else:

      ${title}

      % endif -
      -
      -
      -
      +
      +
      Find a show on theTVDB -
      - % if use_provided_info: Show retrieved from existing metadata: ${provided_indexer_name} @@ -39,56 +35,51 @@ % else: - +    *     - -

      - * This will only affect the language of the retrieved metadata file contents and episode filenames.
      - This DOES NOT allow Medusa to download non-english TV episodes!

      -

      +

      + * This will only affect the language of the retrieved metadata file contents and episode filenames.
+ This DOES NOT allow Medusa to download non-English TV episodes!

      +

      % endif
      -
      Pick the parent folder
      % if provided_show_dir: - Pre-chosen Destination Folder: ${provided_show_dir}
      -
      + Pre-chosen Destination Folder: ${provided_show_dir}
      +
      % else: <%include file="/inc_rootDirs.mako"/> % endif
      -
      Customize options
      <%include file="/inc_addShowOptions.mako"/>
      - % for curNextDir in other_shows: % endfor -
      - +
      % if provided_show_dir: diff --git a/gui/slick/views/addShows_popularShows.mako b/gui/slick/views/addShows_popularShows.mako index 6ced63bbb6..57947b9fed 100644 --- a/gui/slick/views/addShows_popularShows.mako +++ b/gui/slick/views/addShows_popularShows.mako @@ -12,7 +12,6 @@ % else:

      ${title}

      % endif -
      Sort By: - - Sort Order: + Sort Order:
      - <% imdb_tt = [show.imdbid for show in sickbeard.showList if show.imdbid] %> - -
      +
      - % if not popular_shows: -
      +% if not popular_shows: +

      Fetching of IMDB Data failed. Are you online? Exception:

      ${imdb_exception}

      - % else: - % for cur_result in popular_shows: - % if cur_result['imdb_tt'] in imdb_tt: - <% continue %> - % endif - - % if 'rating' in cur_result and cur_result['rating']: - <% cur_rating = cur_result['rating'] %> - <% cur_votes = cur_result['votes'] %> - % else: - <% cur_rating = '0' %> - <% cur_votes = '0' %> - % endif - +% else: + % for cur_result in popular_shows: + % if cur_result['imdb_tt'] not in imdb_tt: + <% + cur_rating = float(cur_result.get('rating', 0)) + cur_votes = int(cur_result.get('votes', 0)) + %>
      -
      - ${(cur_result['name'], ' ')['' == cur_result['name']]} + ${cur_result['name'] or ' '}
      -
      -

      ${int(float(cur_rating)*10)}%

      - % if cur_votes != '0': - ${cur_votes} - % else: - ${cur_votes} votes - % endif +

      ${int(cur_rating*10)}%

      + $('{x} votes'.format(x=cur_votes) if cur_votes else '')
      - % endfor - % endif + % endif + % endfor +% endif
      -
      +
      diff --git a/gui/slick/views/addShows_trendingShows.mako b/gui/slick/views/addShows_trendingShows.mako index d8f3bf50a6..f7e6518f76 100644 --- a/gui/slick/views/addShows_trendingShows.mako +++ b/gui/slick/views/addShows_trendingShows.mako @@ -3,8 +3,8 @@ import sickbeard %> <%block name="scripts"> - - + + <%block name="content"> % if not header is UNDEFINED: @@ -12,7 +12,6 @@ % else:

      ${title}

      % endif -
      • Manage Directories
      • @@ -24,8 +23,7 @@
        <%include file="/inc_addShowOptions.mako"/>
        -
        - +
        Sort By: - - Sort Order: + Sort Order: - - Select Trakt List: + Select Trakt List:
      - -
      +
      -
      - +
      % if traktList: % endif diff --git a/gui/slick/views/apiBuilder.mako b/gui/slick/views/apiBuilder.mako index 74d02fc01e..bfa1ad63e9 100644 --- a/gui/slick/views/apiBuilder.mako +++ b/gui/slick/views/apiBuilder.mako @@ -8,28 +8,23 @@ - % if sbThemeName == "dark": % elif sbThemeName == "light": % endif - Medusa - BRANCH:[${sickbeard.BRANCH}] - ${title} - - - - - - + + + + - @@ -43,28 +38,26 @@ - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + +
      - -
      % for command in sorted(commands): @@ -126,10 +115,8 @@
      ${help['message']}
      - % if help['data']['optionalParameters'] or help['data']['requiredParameters']:

      Parameters

      -
@@ -145,32 +132,25 @@ ${display_parameters_doc(help['data']['optionalParameters'], False)}
% endif -

Playground

- - URL: /api/${apikey}/?cmd=${command}
- + URL: /api/${apikey}/?cmd=${command}
% if help['data']['requiredParameters']: - Required parameters: ${display_parameters_playground(help['data']['requiredParameters'], True, command_id)}
+ Required parameters: ${display_parameters_playground(help['data']['requiredParameters'], True, command_id)}
% endif - % if help['data']['optionalParameters']: - Optional parameters: ${display_parameters_playground(help['data']['optionalParameters'], False, command_id)}
+ Optional parameters: ${display_parameters_playground(help['data']['optionalParameters'], False, command_id)}
% endif - -
- +
@@ -179,17 +159,15 @@ % endfor - - - - + + + - <%def name="display_parameters_doc(parameters, required)"> % for parameter in parameters: @@ -217,7 +195,6 @@ var episodes = ${episodes}; % endfor - <%def name="display_parameters_playground(parameters, required, command)">
% for parameter in parameters: @@ -226,11 +203,9 @@ var episodes = ${episodes}; allowed_values = parameter_help.get('allowedValues', '') type = parameter_help.get('type', '') %> - % if isinstance(allowed_values, list): - - % if allowed_values == [0, 1]: @@ -243,18 +218,15 @@ var episodes = ${episodes}; % elif parameter == 'indexerid': - % if 'season' in parameters: % endif - % if 'episode' in parameters: - -
- -
+

${header}

+
+
+ +
+ +
+
+ +

+ AniDB +

+

AniDB is a non-profit database of anime information that is freely open to the public

+
+
-
- -
- - -
+
+
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+
+ +

Look and Feel

+

How the anime functions should look and behave.

+
+
- -
-
- - - -
- -
- -
- -

Look and Feel

-

How should the anime functions show and behave.

-
-
-
- - -
- -
-
- -

- -
- - -
- + + + + +
+ +
+ + + + + diff --git a/gui/slick/views/config_backuprestore.mako b/gui/slick/views/config_backuprestore.mako index e70a7a32f5..d2b9bc598c 100644 --- a/gui/slick/views/config_backuprestore.mako +++ b/gui/slick/views/config_backuprestore.mako @@ -16,61 +16,46 @@ % else:

${title}

% endif - <% indexer = 0 %> % if sickbeard.INDEXER_DEFAULT: <% indexer = sickbeard.INDEXER_DEFAULT %> % endif
-
-

Backup

Backup your main database file and config.

-
Select the folder you wish to save your backup file to: - -

- - +

+ - -
- +
-
-

Restore

Restore your main database file and config.

-
Select the backup file you wish to restore: - -

- - +

+ - -
- +
@@ -79,6 +64,5 @@
-
diff --git a/gui/slick/views/config_general.mako b/gui/slick/views/config_general.mako index d6bd1fde34..afed3c501c 100644 --- a/gui/slick/views/config_general.mako +++ b/gui/slick/views/config_general.mako @@ -17,12 +17,10 @@ % else:

${title}

% endif - <% indexer = 0 %> % if sickbeard.INDEXER_DEFAULT: <% indexer = sickbeard.INDEXER_DEFAULT %> % endif -
@@ -32,7 +30,6 @@
  • Interface
  • Advanced Settings
  • -
    @@ -40,9 +37,7 @@

    Startup options. Indexer options. Log and show file locations.

    Some options may require a manual restart to take effect.

    -
    -
    -
    Send to trash for actions

    selected actions use trash (recycle bin) instead of the default permanent delete

    -
    -
    -
    -
    -
    -
    -
    @@ -179,68 +166,58 @@
    -
    -
    -
    - -
    - -
    -

    User Interface

    Options for visual appearance.

    -
    -
    -
    +
    -
    -
    -
    Timezone:

display dates and times in either your timezone or the show's network timezone

    @@ -342,113 +316,98 @@
    -
    - - -
    -
    -
    -

    Web Interface

    It is recommended that you enable a username and password to secure Medusa from being tampered with remotely.

    These options require a manual restart to take effect.

    -
    -
    -
    -
    -
    -
    -
    -
    -
    @@ -458,7 +417,7 @@ @@ -467,146 +426,124 @@
    -
    - - -
    - -
    -
    -

    Advanced Settings

    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    @@ -654,29 +589,27 @@
    -
    -
    -
    -

    GitHub

    @@ -708,11 +639,11 @@ % if gh_branch: % for cur_branch in gh_branch: % if sickbeard.GIT_USERNAME and sickbeard.GIT_PASSWORD and sickbeard.DEVELOPER == 1: - + % elif sickbeard.GIT_USERNAME and sickbeard.GIT_PASSWORD and cur_branch in ['master', 'develop']: - + % elif cur_branch == 'master': - + % endif % endfor % endif @@ -723,71 +654,64 @@ % endif % if not gh_branch: -

    Error: No branches found.

    +

    Error: No branches found.

    % else:

    select branch to use (restart required)

    % endif
    -
    -
    -
    -
    - -
    - - -
    +
    All non-absolute folder locations are relative to ${sickbeard.DATA_DIR}
    diff --git a/gui/slick/views/config_notifications.mako b/gui/slick/views/config_notifications.mako index 94979e85eb..5074386ca6 100644 --- a/gui/slick/views/config_notifications.mako +++ b/gui/slick/views/config_notifications.mako @@ -12,7 +12,6 @@ % else:

    ${title}

    % endif -
    @@ -22,1874 +21,1811 @@
  • Devices
  • Social
  • -
    -
    - -
    - -

    KODI

    -

    A free and open source cross-platform media center and home entertainment system software with a 10-foot user interface designed for the living-room TV.

    -
    -
    -
    -
    - -
    - -
    -
    - -

    Plex Media Server

    -

    Experience your media on a visually stunning, easy to use interface on your Mac connected to your TV. Your media library has never looked this good!

    -

    For sending notifications to Plex Home Theater (PHT) clients, use the KODI notifier with port 3005.

    -
    -
    -
    - -
    - -
    -
    - - - -
    -
    +
    -
    -
    +
    +
    -
    -
    -
    -
    +
    -
    -
    -
    -
    Click below to test Plex Media Server(s)
    - - -
     
    -
    -
    -
    -
    -
    - -
    - -
    -
    - +
    +
    Click below to test Plex Home Theater(s)
    + + +

    Note: some Plex Home Theaters do not support notifications e.g. Plexapp for Samsung TVs

    +
    +
    +
    +
    +
    +
    + +

    Emby

    +

    A home media server built using other popular open source technologies.

    - -
    +
    -
    +
    +
    + + +
    +
    + +
    +
    Click below to test.
    + + +
    +
    +
    +
    +
    + +

    NMJ

    +

    The Networked Media Jukebox, or NMJ, is the official media jukebox interface made available for the Popcorn Hour 200-series.

    +
    +
    -
    +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    Click below to test.
    + + +
    +
    +
    +
    +
    + +

    NMJv2

    +

    The Networked Media Jukebox, or NMJv2, is the official media jukebox interface made available for the Popcorn Hour 300 & 400-series.

    +
    +
    -
    -
    -
    -
    -
    - - -
    -
    - -

    Emby

    -

    A home media server built using other popular open source technologies.

    -
    -
    -
    -
    +
    +
    +
    +
    + +

    Synology

    +

    The Synology DiskStation NAS.

    +

    Synology Indexer is the daemon running on the Synology NAS to build its media database.

    -
    +
    -
    -
    - -
    -
    Click below to test.
    - +
    -
    - -
    -
    - - -
    -
    - -

    NMJ

    -

    The Networked Media Jukebox, or NMJ, is the official media jukebox interface made available for the Popcorn Hour 200-series.

    -
    -
    -
    - +
    +
    +
    +
    +
    + +

    Synology Notifier

    +

    Synology Notifier is the notification system of Synology DSM

    - -
    +
    -
    +
    +
    +
    + +
    +
    + +
    +
    + +
    + +
    + +
    +
    +
    + +

    pyTivo

    +

    pyTivo is both an HMO and GoBack server. This notifier will load the completed downloads to your Tivo.

    +
    +
    - - -
    -
    - - -
    -
    - - -
    -
    Click below to test.
    - - -
    - - -
    - -
    -
    - -

    NMJv2

    -

    The Networked Media Jukebox, or NMJv2, is the official media jukebox interface made available for the Popcorn Hour 300 & 400-series.

    -
    -
    -
    - -
    - -
    -
    - - -
    -
    - Database location - - - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    Click below to test.
    - - -
    - -
    -
    - - -
    -
    - -

    Synology

    -

    The Synology DiskStation NAS.

    -

    Synology Indexer is the daemon running on the Synology NAS to build its media database.

    -
    - -
    -
    - - -
    - -
    - -
    - -
    -
    - - -
    -
    - -

    Synology Notifier

    -

    Synology Notifier is the notification system of Synology DSM

    -
    - -
    -
    - - -
    -
    -
    - -
    -
    - -
    -
    - -
    - -
    -
    -
    - - -
    -
    - -

    pyTivo

    -

    pyTivo is both an HMO and GoBack server. This notifier will load the completed downloads to your Tivo.

    -
    -
    -
    - - -
    - -
    -
    - - -
    -
    - - -
    -
    - - -
    - -
    - -
    -
    - -
    - - -
    -
    -
    - -

    Growl

    -

    A cross-platform unobtrusive global notification system.

    -
    -
    -
    - -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - - -
    -
    Click below to register and test Growl, this is required for Growl notifications to work.
    - - -
    - -
    -
    - - -
    -
    - -

    Prowl

    -

    A Growl client for iOS.

    -
    -
    -
    - -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - - - -
    -
    - - -
    -
    Click below to test.
    - - -
    - -
    -
    - - -
    -
    - -

    Libnotify

    -

    The standard desktop notification API for Linux/*nix systems. This notifier will only function if the pynotify module is installed (Ubuntu/Debian package python-notify).

    -
    -
    -
    - -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    Click below to test.
    - - -
    - -
    -
    - - -
    -
    - -

    Pushover

    -

    Pushover makes it easy to send real-time notifications to your Android and iOS devices.

    -
    -
    -
    - -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    Click below to test.
    - - -
    - -
    -
    - -
    -
    - -

    Boxcar 2

    -

    Read your messages where and when you want them!

    -
    -
    -
    - -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    Click below to test.
    - - -
    - -
    -
    - -
    -
    - -

    Notify My Android

    -

    Notify My Android is a Prowl-like Android App and API that offers an easy way to send notifications from your application directly to your Android device.

    -
    -
    -
    - -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - -
    -
    Click below to test.
    - - -
    - -
    -
    - -
    -
    - -

    Pushalot

    -

    Pushalot is a platform for receiving custom push notifications to connected devices running Windows Phone or Windows 8.

    -
    -
    -
    -
    - -
    -
    - -
    -
    -
    -
    - -
    -
    -
    -
    Click below to test.
    - - -
    - -
    -
    - -
    -
    - -

    Pushbullet

    -

    Pushbullet is a platform for receiving custom push notifications to connected devices running Android and desktop Chrome browsers.

    -
    -
    -
    - +
    +
    + + +
    +
    + + +
    +
    + + +
    + +
    +
    +
    +
    +
    +
    +
    + +

    Growl

    +

    A cross-platform unobtrusive global notification system.

    - -
    -
    - -
    +
    -
    -
    - -
    -
    - - -
    -
    - - -
    -
    Click below to test.
    - - -
    - - -
    -
    -
    - -

    Free Mobile

    -

    Free Mobile is a famous French cellular network provider.
    It provides to their customer a free SMS API.

    -
    -
    -
    - +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + + +
    +
Click below to register and test Growl; this is required for Growl notifications to work.
    + + +
    +
    +
    +
    +
    + +

    Prowl

    +

    A Growl client for iOS.

    - -
    -
    - -
    -
    - -
    +
    -
    -
    - - -
    -
    - - -
    -
    Click below to test your settings.
    - - -
    - - -
    -
    -
    - -

    Telegram

    -

    Telegram is a cloud-based instant messaging service.

    -
    -
    -
    - +
    +
    + +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + + + +
    +
    + + +
    +
    Click below to test.
    + + +
    +
    +
    +
    +
    + +

    Libnotify

    +

    The standard desktop notification API for Linux/*nix systems. This notifier will only function if the pynotify module is installed (Ubuntu/Debian package python-notify).

    - -
    +
    -
    +
    +
    + +
    +
    + +
    +
    + +
    +
    Click below to test.
    + + +
    +
    +
    +
    +
    + +

    Pushover

    +

    Pushover makes it easy to send real-time notifications to your Android and iOS devices.

    +
    +
    -
    +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    Click below to test.
    + + +
    +
    +
    +
    +
    + +

    Boxcar 2

    +

    Read your messages where and when you want them!

    +
    +
    -
    -
    - - -
    -
    - - -
    -
    Click below to test your settings.
    - - -
    - - -
    - -
    - -
    -
    -
    - -

    Twitter

    -

    A social networking and microblogging service, enabling its users to send and read other users' messages called tweets.

    -
    -
    -
    - - +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    Click below to test.
    + + +
    +
    +
    +
    +
    + +

    Notify My Android

    +

    Notify My Android is a Prowl-like Android App and API that offers an easy way to send notifications from your application directly to your Android device.

    - -
    +
    -
    +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    Click below to test.
    + + +
    +
    +
    +
    +
    + +

    Pushalot

    +

    Pushalot is a platform for receiving custom push notifications to connected devices running Windows Phone or Windows 8.

    +
    +
    -
    +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    Click below to test.
    + + +
    +
    +
    +
    +
    + +

    Pushbullet

    +

    Pushbullet is a platform for receiving custom push notifications to connected devices running Android and desktop Chrome browsers.

    +
    +
    -
    +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    Click below to test.
    + + +
    +
    +
    +
    +
    + +

    Free Mobile

    +

    Free Mobile is a famous French cellular network provider.
It provides its customers with a free SMS API.

    +
    +
    -
    +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    Click below to test your settings.
    + + +
    +
    +
    +
    +
    + +

    Telegram

    +

    Telegram is a cloud-based instant messaging service.

    +
    +
    - -

    - Twitter account to send Direct Messages to (must follow you) -

    -
    -
    - - -
    -
    - - -
    - -
    Click below to test.
    - - -
    - - -
    - - -
    -
    - -

    Trakt

    -

    trakt helps keep a record of what TV shows and movies you are watching. Based on your favorites, trakt recommends additional shows and movies you'll enjoy!

    -
    -
    -
    - +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    Click below to test your settings.
    + + +
    +
    +
    +
    +
    +
    +
    + +

    Twitter

    +

    A social networking and microblogging service, enabling its users to send and read other users' messages called tweets.

    - -
    -
    - -

    - username of your Trakt account. -

    -
    - - -
    - -

    - PIN code to authorize Medusa to access Trakt on your behalf. -

    -
    - -
    - -

    - - Seconds to wait for Trakt API to respond. (Use 0 to wait forever) - -

    -
    +
    -
    -
    -
    -
    +
    -
    -
    -
    -
    +
    + +
    +
    + +
    +
    + +
    +
    + +

    + Twitter account to send Direct Messages to (must follow you) +

    +
    +
    + + +
    +
    + + +
    + +
    Click below to test.
    + + +
    +
    +
    +
    +
    + +

    Trakt

    +

    trakt helps keep a record of what TV shows and movies you are watching. Based on your favorites, trakt recommends additional shows and movies you'll enjoy!

    +
    +
    +
    + -
    -
    -
    -
    +
    +
    + -
    + + +
    + +

    + PIN code to authorize Medusa to access Trakt on your behalf. +

    +
    - +

    -
    -
    +
    +
    + +
    +
    -
    -
    -
    - - -
    -
    Click below to test.
    - - -
    -
    -
    - -
    -
    - -

    Email

    -

    Allows configuration of email notifications on a per show basis.

    -
    -
    -
    - -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    -
    +
    +
    +
    +
    + +

    Email

    +

    Allows configuration of email notifications on a per show basis.

    +
    +
    +
    + -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - - - -
    - -
    Click below to test.
    - - -
    - -
    - -
    - - -

    - -
    -
    - +
    +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + + + +
    +
    + Click below to test. +
    + + +
    + + + + + +

    + +
    diff --git a/gui/slick/views/config_postProcessing.mako b/gui/slick/views/config_postProcessing.mako index afc5c1dfdf..29d362b93c 100644 --- a/gui/slick/views/config_postProcessing.mako +++ b/gui/slick/views/config_postProcessing.mako @@ -13,871 +13,264 @@ %> <%block name="content">
    -% if not header is UNDEFINED: -

    ${header}

    -% else: -

    ${title}

    -% endif -
    -
    -
    -
    - -
    -
    -

    Post-Processing

    -

    Settings that dictate how Medusa should process completed downloads.

    -
    -
    -
    - - - + % if not header is UNDEFINED: +

    ${header}

    + % else: +

    ${title}

    + % endif +
    +
    + +
    + +
    +
    +

    Post-Processing

    +

    Settings that dictate how Medusa should process completed downloads.

    -
    - - - -
    -
    - - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - - -
    -
    - - -
    -
    - - - -
    -
    - - -
    -
    - - - -
    -
    - - -
    -
    -
    -
    -
    - -
    -

    Episode Naming

    -

    How Medusa will name and sort your episodes.

    -
    - -
    -
    - -
    - -
    -
    +
    +
    + + + +
    +
    + + +
    +
    + + What method should be used to put files into the library? + +
    - - - -
    - -
    - -
    -

    Single-EP Sample:

    -
    -   +
    + +
    -
    -
    - -
    -

    Multi-EP sample:

    -
    -   +
    + +
    -
    -
    - -
    - - - -
    - -
    - - -
    - -
    -
    - -
    -
    - -
    - - -
    - -
    -

    Sample:

    -
    -   -
    -
    +
    + +
    - -
    - -
    - - -
    - -
    -
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + + +
    +
    + +
    - -
    -
    - -
    - - -
    - -
    -

    Sample:

    -
    -   -
    -
    +
    + + + +
    +
    + +
    - -
    - - -
    - - +
    + + + +
    +
    + + +
    +
    +
    +
    +
    +
    +

    Episode Naming

    +

    How Medusa will name and sort your episodes.

    - -
    +
    -
    - -
    +
    +
    - - - +
    -
    - -
    -

    Single-EP Anime Sample:

    +
    +

    Single-EP Sample:

    -   +  
    -
    +
    - -
    -

    Multi-EP Anime sample:

    +
    +

    Multi-EP sample:

    -   +  
    -
    +
    -
    - -
    -
    - - -
    - +
    +
    + +
    +
    +
    + +
    + +
    +
    +

    Sample:

    +
    +   +
    +
    +
    +
    - -
    +
    +
    + +
    +
    +
    + +
    + +
    +
    +

    Sample:

    +
    +   +
    +
    +
    +
    + +
    + +
    - -
    - -
    -
    -
    - -
    -
    -

    Metadata

    -

    The data associated to the data. These are files associated to a TV show in the form of images and text that, when supported, will enhance the viewing experience.

    -
    - -
    -
    - - Toggle the metadata options that you wish to be created. Multiple targets may be used. -
    - - % for (cur_name, cur_generator) in m_dict.iteritems(): - <% cur_metadata_inst = sickbeard.metadata_provider_dict[cur_generator.name] %> - <% cur_id = cur_generator.get_id() %> -
    -
    +
    +
    +
    +

    Metadata

    +

The data associated with your shows. These are files associated with a TV show in the form of images and text that, when supported, will enhance the viewing experience.

    +
    +
    +
    + + Toggle the metadata options that you wish to be created. Multiple targets may be used.
    -
    -
    - -
    -
    All non-absolute folder locations are relative to ${sickbeard.DATA_DIR}
    - - + % endfor +

    +
    +
    +
    +
    +
    All non-absolute folder locations are relative to ${sickbeard.DATA_DIR}
    + + +
    -
    -
    +
    +
    diff --git a/gui/slick/views/config_providers.mako b/gui/slick/views/config_providers.mako index 3c4c5d0688..7199302aca 100644 --- a/gui/slick/views/config_providers.mako +++ b/gui/slick/views/config_providers.mako @@ -5,7 +5,7 @@ from sickrage.providers.GenericProvider import GenericProvider %> <%block name="scripts"> - + - - - - + + + + + <%block name="content"> <%namespace file="/inc_defs.mako" import="renderQualityPill"/> - +
    Change Show: - + - +
    -
    -

    ${show.name}

    - % if seasonResults: ##There is a special/season_0?## % if int(seasonResults[-1]["season"]) == 0: @@ -63,10 +58,9 @@ % if season_special: Display Specials: - ${('Show', 'Hide')[bool(sickbeard.DISPLAY_SHOW_SPECIALS)]} + ${('Show', 'Hide')[bool(sickbeard.DISPLAY_SHOW_SPECIALS)]} % endif -
    % if (len(seasonResults) > 14): @@ -90,38 +84,30 @@ % endfor % endif -
    % endif - -
    - % if show_message:
    ${show_message}
    % endif -
    - +
    -
    -
    % if 'rating' in show.imdb_info: - <% rating_tip = str(show.imdb_info['rating']) + " / 10" + " Stars" + "
    " + str(show.imdb_info['votes']) + " Votes" %> + <% rating_tip = str(show.imdb_info['rating']) + " / 10" + " Stars" + "
    " + str(show.imdb_info['votes']) + " Votes" %> ${show.imdb_info['rating']} % endif - % if not show.imdbid: (${show.startyear}) - ${show.runtime} minutes - % else: % if 'country_codes' in show.imdb_info: % for country in show.imdb_info['country_codes'].split('|'): - + % endfor % endif @@ -130,23 +116,19 @@ % endif ${show.imdb_info.get('runtimes') or show.runtime} minutes - - [imdb] + [imdb] % endif - - ${sickbeard.indexerApi(show.indexer).name} + ${sickbeard.indexerApi(show.indexer).name} - % if xem_numbering or xem_absolute_numbering: - [xem] + [xem] % endif
    -
      % if show.imdb_info.get('genres'): @@ -160,7 +142,6 @@ % endif
    -
    <% anyQualities, bestQualities = Quality.splitQuality(int(show.quality)) %> @@ -169,13 +150,12 @@ ${renderQualityPill(show.quality)} % else: % if anyQualities: - Allowed: ${", ".join([capture(renderQualityPill, x) for x in sorted(anyQualities)])}${("", "
    ")[bool(bestQualities)]} + Allowed: ${", ".join([capture(renderQualityPill, x) for x in sorted(anyQualities)])}${("", "
    ")[bool(bestQualities)]} % endif % if bestQualities: Preferred: ${", ".join([capture(renderQualityPill, x) for x in sorted(bestQualities)])} % endif % endif - % if show.network and show.airs: % elif show.network: @@ -188,12 +168,11 @@ % if showLoc[1]: % else: - + % endif % if all_scene_exceptions: % endif - % if require_words: % endif @@ -218,33 +197,28 @@ % endif - -
    Originally Airs: ${show.airs} ${("(invalid Timeformat) ", "")[network_timezones.test_timeformat(show.airs)]} on ${show.network}
    Location: ${showLoc[0]}
    Location: ${showLoc[0]} (Missing)
    Location: ${showLoc[0]} (Missing)
    Scene Name:${all_scene_exceptions}
    Required Words: ${require_words}
    ${', '.join(bwl.blacklist)}
    Size:${pretty_file_size(sickbeard.helpers.get_size(showLoc[0]))}
    - <% info_flag = subtitles.code_from_code(show.lang) if show.lang else '' %> - + % if sickbeard.USE_SUBTITLES: - + % endif - - - - - - - + + + + + + +
    Info Language:${show.lang}
    Info Language:${show.lang}
    Subtitles: ${(
    Subtitles: ${(
    Season Folders: ${(
    Paused: ${(
    Air-by-Date: ${(
    Sports: ${(
    Anime: ${(
    DVD Order: ${(
    Scene Numbering: ${(
    Season Folders: ${(
    Paused: ${(
    Air-by-Date: ${(
    Sports: ${(
    Anime: ${(
    DVD Order: ${(
    Scene Numbering: ${(
    -
    -
    - Change selected episodes to:
    + Change selected episodes to:
    - -
    - +
    <% total_snatched = epCounts[Overview.SNATCHED] + epCounts[Overview.SNATCHED_PROPER] + epCounts[Overview.SNATCHED_BEST] %> @@ -272,17 +244,15 @@
    -
    -
    -
    -
    - +
    +
    +
    <% curSeason = -1 %> <% odd = 0 %> @@ -291,38 +261,31 @@ epStr = str(epResult["season"]) + "x" + str(epResult["episode"]) if not epStr in epCats: continue - if not sickbeard.DISPLAY_SHOW_SPECIALS and int(epResult["season"]) == 0: continue - scene = False scene_anime = False if not show.air_by_date and not show.is_sports and not show.is_anime and show.is_scene: scene = True elif not show.air_by_date and not show.is_sports and show.is_anime and show.is_scene: scene_anime = True - (dfltSeas, dfltEpis, dfltAbsolute) = (0, 0, 0) if (epResult["season"], epResult["episode"]) in xem_numbering: (dfltSeas, dfltEpis) = xem_numbering[(epResult["season"], epResult["episode"])] - if epResult["absolute_number"] in xem_absolute_numbering: dfltAbsolute = xem_absolute_numbering[epResult["absolute_number"]] - if epResult["absolute_number"] in scene_absolute_numbering: scAbsolute = scene_absolute_numbering[epResult["absolute_number"]] dfltAbsNumbering = False else: scAbsolute = dfltAbsolute dfltAbsNumbering = True - if (epResult["season"], epResult["episode"]) in scene_numbering: (scSeas, scEpis) = scene_numbering[(epResult["season"], epResult["episode"])] dfltEpNumbering = False else: (scSeas, scEpis) = (dfltSeas, dfltEpis) dfltEpNumbering = True - epLoc = epResult["location"] if epLoc and show._location and epLoc.lower().startswith(show._location.lower()): epLoc = epLoc[len(show._location)+1:] @@ -354,7 +317,7 @@

    ${("Specials", "Season " + str(epResult["season"]))[int(epResult["season"]) > 0]} % if not any([i for i in sql_results if epResult['season'] == i['season'] and int(i['status']) == 1]): - search + search % endif

    0]}>
    @@ -389,7 +352,7 @@ - + + @@ -510,7 +473,7 @@ @@ -523,25 +486,23 @@ % endfor

    ${("Specials", "Season " + str(epResult["season"]))[bool(int(epResult["season"]))]} % if not any([i for i in sql_results if epResult['season'] == i['season'] and int(i['status']) == 1]): - search + search % endif

    @@ -433,8 +396,8 @@ % endif -
    ${(${(${(${( <% text = str(epResult['episode']) @@ -454,7 +417,7 @@ % else: value="${str(scSeas)}x${str(scEpis)}" % endif - style="padding: 0; text-align: center; max-width: 60px;" autocapitalize="off" /> + style="padding: 0; text-align: center; max-width: 60px;"/> + style="padding: 0; text-align: center; max-width: 60px;"/> % if epResult["description"] != "" and epResult["description"] is not None: - + % else: - + % endif ${epResult["name"]} % for flag in (epResult["subtitles"] or '').split(','): % if flag.strip(): - ${subtitles.name_from_code(flag)} + ${subtitles.name_from_code(flag)} % endif % endfor
    - - - - - diff --git a/gui/slick/views/editShow.mako b/gui/slick/views/editShow.mako index 6c4f1d88e9..154bad2598 100644 --- a/gui/slick/views/editShow.mako +++ b/gui/slick/views/editShow.mako @@ -8,53 +8,44 @@ from sickrage.helper import exceptions from sickbeard import scene_exceptions %> - <%block name="metas"> - <%block name="scripts"> - - + + % if show.is_anime: - + % endif - <%block name="content"> % if not header is UNDEFINED:

    ${header}

    % else:

    ${title}

    % endif -
    -
    -
    -

    Main Settings

    -
    -
    -
    -
    -
    -
    -
    -

    Format Settings

    -
    -
    -
    -
    -
    -
    -
    -

    Advanced Settings

    -
    -
    diff --git a/gui/slick/views/manage_failedDownloads.mako b/gui/slick/views/manage_failedDownloads.mako index 243e22d6a7..5f4aa3c274 100644 --- a/gui/slick/views/manage_failedDownloads.mako +++ b/gui/slick/views/manage_failedDownloads.mako @@ -15,23 +15,21 @@ % else:

    ${title}

    % endif -
    Limit:
    - - @@ -55,9 +53,9 @@ diff --git a/gui/slick/views/manage_manageSearches.mako b/gui/slick/views/manage_manageSearches.mako index b4ce4d1176..fa4b3df4be 100644 --- a/gui/slick/views/manage_manageSearches.mako +++ b/gui/slick/views/manage_manageSearches.mako @@ -6,7 +6,7 @@ from sickbeard.common import Quality, qualityPresets, statusStrings, qualityPresetStrings, cpu_presets %> <%block name="scripts"> - + <%block name="content">
    @@ -15,52 +15,47 @@ % else:

    ${title}

    % endif -

    Backlog Search:

    - Force - ${('pause', 'Unpause')[bool(backlogPaused)]} + Force + ${('pause', 'Unpause')[bool(backlogPaused)]} % if not backlogRunning: - Not in progress
    + Not in progress
    % else: - ${('', 'Paused:')[bool(backlogPaused)]} - Currently running
    + ${'Paused:' if backlogPaused else ''} + Currently running
    % endif -
    - +

    Daily Search:

    - Force -${('Not in progress', 'In Progress')[dailySearchStatus]}
    -
    - + Force +${('Not in progress', 'In Progress')[dailySearchStatus]}
    +

    Find Propers Search:

    - Force + Force % if not sickbeard.DOWNLOAD_PROPERS: - Propers search disabled
    + Propers search disabled
    % elif not findPropersStatus: - Not in progress
    + Not in progress
    % else: - In Progress
    + In Progress
    % endif -
    - +

    Subtitle Search:

    - Force + Force % if not sickbeard.USE_SUBTITLES: - Subtitle search disabled
    + Subtitle search disabled
    % elif not subtitlesFinderStatus: - Not in progress
    + Not in progress
    % else: - In Progress
    + In Progress
    % endif -
    - +

    Search Queue:

    -Backlog: ${searchQueueLength['backlog']} pending items
    -Daily: ${searchQueueLength['daily']} pending items
    -Forced: ${forcedSearchQueueLength['forced_search']} pending items
    -Manual: ${forcedSearchQueueLength['manual_search']} pending items
    -Failed: ${forcedSearchQueueLength['failed']} pending items
    +Backlog: ${searchQueueLength['backlog']} pending items
    +Daily: ${searchQueueLength['daily']} pending items
    +Forced: ${forcedSearchQueueLength['forced_search']} pending items
    +Manual: ${forcedSearchQueueLength['manual_search']} pending items
    +Failed: ${forcedSearchQueueLength['failed']} pending items
    diff --git a/gui/slick/views/manage_massEdit.mako b/gui/slick/views/manage_massEdit.mako index a2deb72897..2e85af0013 100644 --- a/gui/slick/views/manage_massEdit.mako +++ b/gui/slick/views/manage_massEdit.mako @@ -6,51 +6,40 @@ from sickbeard.common import Quality, qualityPresets, qualityPresetStrings, statusStrings from sickrage.helper import exceptions %> - <%block name="scripts"> <% if quality_value is not None: initial_quality = int(quality_value) else: initial_quality = common.SD - anyQualities, bestQualities = common.Quality.splitQuality(initial_quality) %> - - + + - <%block name="content"> -
    -
    -
    -

    Main Settings

    - - ==> Changing any settings marked with (*) will force a refresh of the selected shows.
    -
    - + NOTE: Changing any settings marked with (*) will force a refresh of the selected shows.
    +
    -
    -
    % endfor @@ -82,7 +71,6 @@ -
    - - - diff --git a/gui/slick/views/manage_subtitleMissed.mako b/gui/slick/views/manage_subtitleMissed.mako index 41e8b838c6..fc202ab7be 100644 --- a/gui/slick/views/manage_subtitleMissed.mako +++ b/gui/slick/views/manage_subtitleMissed.mako @@ -18,16 +18,16 @@ % if not whichSubs or (whichSubs and not ep_counts): % if whichSubs:

    All of your episodes have ${subsLanguage} subtitles.

    -
    +
    % endif - - + % if sickbeard.SUBTITLES_MULTI: Manage episodes without % else: Manage episodes without + % endif - % else: ##Strange that this is used by js but is an input outside of any form? - + % if sickbeard.SUBTITLES_MULTI:

    Episodes without ${subsLanguage} subtitles.

    % else: @@ -57,18 +56,18 @@ % endif % endfor % endif -
    +
    Download missed subtitles for selected episodes
    - - + +
    -
    +
    Release Size ProviderRemove
    +
    Remove
    <% provider = providers.getProviderClass(GenericProvider.make_id(hItem["provider"])) %> % if provider is not None: - ${provider.name} + ${provider.name} % else: - missing provider + missing provider % endif
    % for cur_indexer_id in sorted_show_ids: - + % endfor
    ${show_names[cur_indexer_id]} (${ep_counts[cur_indexer_id]}) ${show_names[cur_indexer_id]} (${ep_counts[cur_indexer_id]})
    diff --git a/gui/slick/views/manage_torrents.mako b/gui/slick/views/manage_torrents.mako index b4a95fa310..0f49de5854 100644 --- a/gui/slick/views/manage_torrents.mako +++ b/gui/slick/views/manage_torrents.mako @@ -1,6 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%block name="scripts"> - + <%block name="content"> % if not header is UNDEFINED: @@ -8,7 +8,6 @@ % else:

    ${title}

    % endif - ${info_download_station} - + diff --git a/gui/slick/views/partials/alerts.mako b/gui/slick/views/partials/alerts.mako new file mode 100644 index 0000000000..ab0c4c1748 --- /dev/null +++ b/gui/slick/views/partials/alerts.mako @@ -0,0 +1,15 @@ +<%! + from sickbeard import BRANCH, DEVELOPER, NEWEST_VERSION_STRING +%> + +% if BRANCH and BRANCH != 'master' and not DEVELOPER and loggedIn: + +% endif +% if NEWEST_VERSION_STRING and loggedIn: + +% endif + diff --git a/gui/slick/views/partials/footer.mako b/gui/slick/views/partials/footer.mako index 2ab04a67be..c3f70f49ca 100644 --- a/gui/slick/views/partials/footer.mako +++ b/gui/slick/views/partials/footer.mako @@ -1,62 +1,67 @@ <%! - import datetime + from datetime import datetime + from time import time + from contextlib2 import suppress + import os import re - import sickbeard + from sickbeard import ( + dailySearchScheduler as daily_search_scheduler, + backlogSearchScheduler as backlog_search_scheduler, + BRANCH, DATE_PRESET, TIME_PRESET + ) from sickrage.helper.common import pretty_file_size from sickrage.show.Show import Show - from time import time - # resource module is unix only - has_resource_module = True - try: - import resource - except ImportError: - has_resource_module = False -%> -<% - srRoot = sickbeard.WEB_ROOT + mem_usage = None + with suppress(ImportError): + from psutil import Process + from os import getpid + mem_usage = Process(getpid()).memory_info().rss + + with suppress(ImportError): + if not mem_usage: + import resource # resource module is unix only + mem_usage = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss + + stats = Show.overall_stats() + ep_downloaded = stats['episodes']['downloaded'] + ep_snatched = stats['episodes']['snatched'] + ep_total = stats['episodes']['total'] + ep_percentage = '' if ep_total == 0 else '(%s%%)' % re.sub(r'(\d+)(\.\d)\d+', r'\1\2', str((float(ep_downloaded)/float(ep_total))*100)) %> + % if loggedIn:
    - - - - - - - - - - - - + + + + + + + + + + + + % endif + diff --git a/gui/slick/views/partials/header.mako b/gui/slick/views/partials/header.mako index 595e5fe1e5..06f5afd9cf 100644 --- a/gui/slick/views/partials/header.mako +++ b/gui/slick/views/partials/header.mako @@ -5,17 +5,8 @@ from sickrage.helper.common import pretty_file_size from sickrage.show.Show import Show from time import time - - # resource module is unix only - has_resource_module = True - try: - import resource - except ImportError: - has_resource_module = False -%> -<% - srRoot = sickbeard.WEB_ROOT %> + + diff --git a/gui/slick/views/partials/home/banner.mako b/gui/slick/views/partials/home/banner.mako index d20da4260d..95d510042f 100644 --- a/gui/slick/views/partials/home/banner.mako +++ b/gui/slick/views/partials/home/banner.mako @@ -29,7 +29,7 @@ - Add ${('Show', 'Anime')[curListType == 'Anime']} + Add ${('Show', 'Anime')[curListType == 'Anime']}       @@ -75,42 +75,32 @@ cur_total = 0 show_size = 0 download_stat_tip = '' - if curShow.indexerid in show_stat: cur_airs_next = show_stat[curShow.indexerid]['ep_airs_next'] cur_airs_prev = show_stat[curShow.indexerid]['ep_airs_prev'] - cur_snatched = show_stat[curShow.indexerid]['ep_snatched'] if not cur_snatched: cur_snatched = 0 - cur_downloaded = show_stat[curShow.indexerid]['ep_downloaded'] if not cur_downloaded: cur_downloaded = 0 - cur_total = show_stat[curShow.indexerid]['ep_total'] if not cur_total: cur_total = 0 - show_size = show_stat[curShow.indexerid]['show_size'] - download_stat = str(cur_downloaded) download_stat_tip = "Downloaded: " + str(cur_downloaded) - if cur_snatched: download_stat = download_stat + "+" + str(cur_snatched) download_stat_tip = download_stat_tip + " " + "Snatched: " + str(cur_snatched) - download_stat = download_stat + " / " + str(cur_total) download_stat_tip = download_stat_tip + " " + "Total: " + str(cur_total) - nom = cur_downloaded if cur_total: den = cur_total else: den = 1 download_stat_tip = "Unaired" - progressbar_percent = nom * 100 / den %> @@ -126,7 +116,6 @@ % else: % endif - % if cur_airs_prev: <% airDate = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(cur_airs_prev, curShow.airs, curShow.network)) %> % try: @@ -139,41 +128,34 @@ % else: % endif - ${curShow.name} - % if curShow.network: - ${curShow.network} + ${curShow.network} ${curShow.network} % else: - No Network + No Network No Network % endif - ${renderQualityPill(curShow.quality, showTitle=True)} - ## This first span is used for sorting and is never displayed to user ${download_stat}
    ${download_stat} - ${pretty_file_size(show_size)} - <% paused = int(curShow.paused) == 0 and curShow.status == 'Continuing' %> - ${('No', 'Yes')[bool(paused)]} + ${('No', 'Yes')[bool(paused)]} - <% display_status = curShow.status diff --git a/gui/slick/views/partials/home/poster.mako b/gui/slick/views/partials/home/poster.mako index 23ec7be384..6d669675fa 100644 --- a/gui/slick/views/partials/home/poster.mako +++ b/gui/slick/views/partials/home/poster.mako @@ -20,18 +20,15 @@ % for curLoadingShow in sickbeard.showQueueScheduler.action.loadingShowList: % if curLoadingShow.show is None:
    - +
    Loading... (${curLoadingShow.show_name})
    - % endif % endfor - <% myShowList.sort(lambda x, y: cmp(x.name, y.name)) %> % for curShow in myShowList: - <% cur_airs_next = '' cur_snatched = 0 @@ -39,47 +36,36 @@ cur_total = 0 download_stat_tip = '' display_status = curShow.status - if None is not display_status: if re.search(r'(?i)(?:new|returning)\s*series', curShow.status): display_status = 'Continuing' elif re.search(r'(?i)(?:nded)', curShow.status): display_status = 'Ended' - if curShow.indexerid in show_stat: cur_airs_next = show_stat[curShow.indexerid]['ep_airs_next'] - cur_snatched = show_stat[curShow.indexerid]['ep_snatched'] if not cur_snatched: cur_snatched = 0 - cur_downloaded = show_stat[curShow.indexerid]['ep_downloaded'] if not cur_downloaded: cur_downloaded = 0 - cur_total = show_stat[curShow.indexerid]['ep_total'] if not cur_total: cur_total = 0 - download_stat = str(cur_downloaded) download_stat_tip = "Downloaded: " + str(cur_downloaded) - if cur_snatched: download_stat = download_stat + "+" + str(cur_snatched) download_stat_tip = download_stat_tip + " " + "Snatched: " + str(cur_snatched) - download_stat = download_stat + " / " + str(cur_total) download_stat_tip = download_stat_tip + " " + "Total: " + str(cur_total) - nom = cur_downloaded if cur_total: den = cur_total else: den = 1 download_stat_tip = "Unaired" - progressbar_percent = nom * 100 / den - data_date = '6000000000.0' if cur_airs_next: data_date = calendar.timegm(sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(cur_airs_next, curShow.airs, curShow.network)).timetuple()) @@ -93,15 +79,12 @@ %>
    - +
    -
    -
    ${curShow.name}
    -
    % if cur_airs_next: <% ldatetime = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(cur_airs_next, curShow.airs, curShow.network)) %> @@ -126,31 +109,26 @@ ${output_html} % endif
    -
    - -
    ${download_stat} % if curShow.network: - ${curShow.network} + ${curShow.network} % else: - No Network + No Network % endif ${renderQualityPill(curShow.quality, showTitle=True, overrideClass="show-quality")}
    -
    - % endfor
    diff --git a/gui/slick/views/partials/home/simple.mako b/gui/slick/views/partials/home/simple.mako index 8fa68617b7..c3d8baf3f7 100644 --- a/gui/slick/views/partials/home/simple.mako +++ b/gui/slick/views/partials/home/simple.mako @@ -29,7 +29,7 @@ - Add ${('Show', 'Anime')[curListType == 'Anime']} + Add ${('Show', 'Anime')[curListType == 'Anime']}       @@ -75,42 +75,32 @@ cur_total = 0 show_size = 0 download_stat_tip = '' - if curShow.indexerid in show_stat: cur_airs_next = show_stat[curShow.indexerid]['ep_airs_next'] cur_airs_prev = show_stat[curShow.indexerid]['ep_airs_prev'] - cur_snatched = show_stat[curShow.indexerid]['ep_snatched'] if not cur_snatched: cur_snatched = 0 - cur_downloaded = show_stat[curShow.indexerid]['ep_downloaded'] if not cur_downloaded: cur_downloaded = 0 - cur_total = show_stat[curShow.indexerid]['ep_total'] if not cur_total: cur_total = 0 - show_size = show_stat[curShow.indexerid]['show_size'] - download_stat = str(cur_downloaded) download_stat_tip = "Downloaded: " + str(cur_downloaded) - if cur_snatched: download_stat = download_stat + "+" + str(cur_snatched) download_stat_tip = download_stat_tip + " " + "Snatched: " + str(cur_snatched) - download_stat = download_stat + " / " + str(cur_total) download_stat_tip = download_stat_tip + " " + "Total: " + str(cur_total) - nom = cur_downloaded if cur_total: den = cur_total else: den = 1 download_stat_tip = "Unaired" - progressbar_percent = nom * 100 / den %> @@ -126,7 +116,6 @@ % else: % endif - % if cur_airs_prev: <% airDate = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(cur_airs_prev, curShow.airs, curShow.network)) %> % try: @@ -139,29 +128,22 @@ % else: % endif - - ${curShow.name} - + ${curShow.name} ${curShow.network} - ${renderQualityPill(curShow.quality, showTitle=True)} - ## This first span is used for sorting and is never displayed to user ${download_stat}
    ${download_stat} - ${pretty_file_size(show_size)} - <% paused = int(curShow.paused) == 0 and curShow.status == 'Continuing' %> - ${('No', 'Yes')[bool(paused)]} + ${('No', 'Yes')[bool(paused)]} - <% display_status = curShow.status diff --git a/gui/slick/views/partials/home/small.mako b/gui/slick/views/partials/home/small.mako index 83769c636e..aa4f621c21 100644 --- a/gui/slick/views/partials/home/small.mako +++ b/gui/slick/views/partials/home/small.mako @@ -29,7 +29,7 @@ - Add ${('Show', 'Anime')[curListType == 'Anime']} + Add ${('Show', 'Anime')[curListType == 'Anime']}       @@ -75,42 +75,32 @@ cur_total = 0 show_size = 0 download_stat_tip = '' - if curShow.indexerid in show_stat: cur_airs_next = show_stat[curShow.indexerid]['ep_airs_next'] cur_airs_prev = show_stat[curShow.indexerid]['ep_airs_prev'] - cur_snatched = show_stat[curShow.indexerid]['ep_snatched'] if not cur_snatched: cur_snatched = 0 - cur_downloaded = show_stat[curShow.indexerid]['ep_downloaded'] if not cur_downloaded: cur_downloaded = 0 - cur_total = show_stat[curShow.indexerid]['ep_total'] if not cur_total: cur_total = 0 - show_size = show_stat[curShow.indexerid]['show_size'] - download_stat = str(cur_downloaded) download_stat_tip = "Downloaded: " + str(cur_downloaded) - if cur_snatched: download_stat = download_stat + "+" + str(cur_snatched) download_stat_tip = download_stat_tip + " " + "Snatched: " + str(cur_snatched) - download_stat = download_stat + " / " + str(cur_total) download_stat_tip = download_stat_tip + " " + "Total: " + str(cur_total) - nom = cur_downloaded if cur_total: den = cur_total else: den = 1 download_stat_tip = "Unaired" - progressbar_percent = nom * 100 / den %> @@ -126,7 +116,6 @@ % else: % endif - % if cur_airs_prev: <% airDate = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(cur_airs_prev, curShow.airs, curShow.network)) %> % try: @@ -139,42 +128,35 @@ % else: % endif - - % if curShow.network: - ${curShow.network} + ${curShow.network} ${curShow.network} % else: - No Network + No Network No Network % endif - ${renderQualityPill(curShow.quality, showTitle=True)} - ## This first span is used for sorting and is never displayed to user ${download_stat}
    ${download_stat} - ${pretty_file_size(show_size)} - <% paused = int(curShow.paused) == 0 and curShow.status == 'Continuing' %> - ${('No', 'Yes')[bool(paused)]} + ${('No', 'Yes')[bool(paused)]} - <% display_status = curShow.status diff --git a/gui/slick/views/partials/submenu.mako b/gui/slick/views/partials/submenu.mako new file mode 100644 index 0000000000..38277286c9 --- /dev/null +++ b/gui/slick/views/partials/submenu.mako @@ -0,0 +1,26 @@ + + + diff --git a/gui/slick/views/restart.mako b/gui/slick/views/restart.mako index 8e91bbef85..bea94e2127 100644 --- a/gui/slick/views/restart.mako +++ b/gui/slick/views/restart.mako @@ -5,7 +5,6 @@ import sickbeard <%block name="metas"> - <%block name="css"> - <%block name="content"> <% try: @@ -25,22 +23,19 @@ except NameError:
    Waiting for Medusa to shut down: - - + +
    - - - diff --git a/gui/slick/views/schedule.mako b/gui/slick/views/schedule.mako index 7ff0fe2a29..9457c19ece 100644 --- a/gui/slick/views/schedule.mako +++ b/gui/slick/views/schedule.mako @@ -8,8 +8,8 @@ import re %> <%block name="scripts"> - - + + <%block name="css"> <%block name="content"> -
    -
    - % if not header is UNDEFINED: -

    ${header}

    - % else: -

    ${title}

    - % endif -
    - -
    -
    - - -
    - -
    - - - -
    - -
    - - - -
    - -
    -
    -
    +
    + % if not header is UNDEFINED: +

    ${header}

    + % else: +

    ${title}

    + % endif +
    +
    +
    + + +
    + +
    + + +
    + +
    + + +
    + +
    +
    +
    -
    +
     ${logLines}
     
    -
    +
    diff --git a/setup.py b/setup.py index a5111937b9..cb60307c32 100644 --- a/setup.py +++ b/setup.py @@ -23,6 +23,9 @@ 'rednose', 'mock', ], + extras_require={ + 'system-stats': ['psutil'], + }, classifiers=[ 'Development Status :: ???', 'Intended Audience :: Developers', diff --git a/sickbeard/server/web/home/add_shows.py b/sickbeard/server/web/home/add_shows.py index 8a6e4a40b0..82af9f3081 100644 --- a/sickbeard/server/web/home/add_shows.py +++ b/sickbeard/server/web/home/add_shows.py @@ -418,7 +418,7 @@ def addShowByID(self, indexer_id, show_name, indexer='TVDB', which_series=None, if Show.find(sickbeard.showList, int(indexer_id)): return - # Sanitize the paramater anyQualities and bestQualities. As these would normally be passed as lists + # Sanitize the paramater allowed_qualities and preferred_qualities. As these would normally be passed as lists if any_qualities: any_qualities = any_qualities.split(',') else: diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py index 58cb884b4c..6c0d024950 100644 --- a/sickbeard/show_name_helpers.py +++ b/sickbeard/show_name_helpers.py @@ -53,6 +53,9 @@ def containsAtLeastOneWord(name, words): Returns: False if the name doesn't contain any word of words list, or the found word from the list. """ + if not (name and words): + return False + if isinstance(words, string_types): words = words.split(',') items = [(re.compile(r'(^|[\W_])%s($|[\W_])' % word.strip(), re.I), word.strip()) for word in words] @@ -63,9 +66,10 @@ def containsAtLeastOneWord(name, words): # If word is a regex like "dub(bed)?" or "sub(bed|ed|pack|s)" # then return just the matched word: "dub" and not full regex if word in resultFilters: - return subs_word.replace(".","") + return subs_word.replace(".", "") else: return word + return False diff --git a/sickrage/helper/common.py b/sickrage/helper/common.py index aba7f8b972..6927bbbba0 100644 --- a/sickrage/helper/common.py +++ b/sickrage/helper/common.py @@ -332,6 +332,7 @@ def episode_num(season=None, episode=None, **kwargs): if not (season and episode) and (season or episode): return '{0:0>3}'.format(season or episode) + def enabled_providers(search_type): """ Return providers based on search type: daily, backlog and manualsearch @@ -341,6 +342,7 @@ def enabled_providers(search_type): hasattr(x, 'enable_{}'.format(search_type)) and getattr(x, 'enable_{}'.format(search_type))] + def remove_strings(old_string, unwanted_strings): """ Return string removing all unwanted strings on it @@ -356,5 +358,3 @@ def remove_strings(old_string, unwanted_strings): for item in unwanted_strings: old_string = old_string.replace(item, '') return old_string - - From d77981e9c625ae6255d6303bd6193278819cb18c Mon Sep 17 00:00:00 2001 From: Labrys of Knossos Date: Tue, 28 Jun 2016 08:12:48 -0400 Subject: [PATCH 117/134] Revert "Ui cleanup (#689)" This reverts commit 8fb13faf9b63c1875e895d6867e53a69af6b6a35. 
---
 gui/slick/css/browser.css | 2 +-
 gui/slick/css/country-flags.css | 755 ++--
 gui/slick/css/dark.css | 427 +--
 gui/slick/css/light.css | 94 +-
 gui/slick/css/style.css | 1013 +++--
 gui/slick/js/addShowOptions.js | 6 +-
 gui/slick/js/qualityChooser.js | 4 +-
 gui/slick/views/500.mako | 9 +-
 gui/slick/views/IRC.mako | 2 +-
 gui/slick/views/addShows.mako | 21 +-
 gui/slick/views/addShows_addExistingShow.mako | 21 +-
 gui/slick/views/addShows_newShow.mako | 41 +-
 gui/slick/views/addShows_popularShows.mako | 56 +-
 gui/slick/views/addShows_trendingShows.mako | 20 +-
 gui/slick/views/apiBuilder.mako | 102 +-
 gui/slick/views/config.mako | 6 +-
 gui/slick/views/config_anime.mako | 165 +-
 gui/slick/views/config_backuprestore.mako | 28 +-
 gui/slick/views/config_general.mako | 226 +-
 gui/slick/views/config_notifications.mako | 3310 +++++++++--------
 gui/slick/views/config_postProcessing.mako | 1837 ++++-----
 gui/slick/views/config_providers.mako | 184 +-
 gui/slick/views/config_search.mako | 149 +-
 gui/slick/views/config_subtitles.mako | 74 +-
 gui/slick/views/displayShow.mako | 134 +-
 gui/slick/views/editShow.mako | 70 +-
 gui/slick/views/history.mako | 57 +-
 gui/slick/views/home.mako | 27 +-
 gui/slick/views/home_massAddTable.mako | 12 +-
 gui/slick/views/home_postprocess.mako | 6 +-
 gui/slick/views/inc_addShowOptions.mako | 25 +-
 gui/slick/views/inc_blackwhitelist.mako | 15 +-
 gui/slick/views/inc_defs.mako | 5 +
 gui/slick/views/inc_qualityChooser.mako | 16 +-
 gui/slick/views/inc_rootDirs.mako | 5 +-
 gui/slick/views/layouts/main.mako | 224 +-
 gui/slick/views/manage.mako | 39 +-
 gui/slick/views/manage_backlogOverview.mako | 20 +-
 gui/slick/views/manage_episodeStatuses.mako | 33 +-
 gui/slick/views/manage_failedDownloads.mako | 16 +-
 gui/slick/views/manage_manageSearches.mako | 55 +-
 gui/slick/views/manage_massEdit.mako | 117 +-
 gui/slick/views/manage_subtitleMissed.mako | 21 +-
 gui/slick/views/manage_torrents.mako | 5 +-
 gui/slick/views/partials/alerts.mako | 15 -
 gui/slick/views/partials/footer.mako | 87 +-
 gui/slick/views/partials/header.mako | 112 +-
 gui/slick/views/partials/home/banner.mako | 30 +-
 gui/slick/views/partials/home/poster.mako | 30 +-
 gui/slick/views/partials/home/simple.mako | 24 +-
 gui/slick/views/partials/home/small.mako | 32 +-
 gui/slick/views/partials/submenu.mako | 26 -
 gui/slick/views/restart.mako | 17 +-
 gui/slick/views/schedule.mako | 138 +-
 gui/slick/views/snatchSelection.mako | 816 ++--
 gui/slick/views/status.mako | 5 +-
 gui/slick/views/testRename.mako | 16 +-
 gui/slick/views/trendingShows.mako | 14 +-
 gui/slick/views/viewlogs.mako | 92 +-
 setup.py | 3 -
 sickbeard/server/web/home/add_shows.py | 2 +-
 sickbeard/show_name_helpers.py | 6 +-
 sickrage/helper/common.py | 4 +-
 63 files changed, 5591 insertions(+), 5332 deletions(-)
 delete mode 100644 gui/slick/views/partials/alerts.mako
 delete mode 100644 gui/slick/views/partials/submenu.mako

diff --git a/gui/slick/css/browser.css b/gui/slick/css/browser.css
index 56cd697714..bdc1f3ab3d 100644
--- a/gui/slick/css/browser.css
+++ b/gui/slick/css/browser.css
@@ -20,7 +20,7 @@
 }
 
 .fileBrowserDialog ul li a:hover {
-    color: rgb(0, 0, 255);
+    color: #00f;
     background: none;
 }
 
diff --git a/gui/slick/css/country-flags.css b/gui/slick/css/country-flags.css
index 1f9a424964..44ca6e1976 100644
--- a/gui/slick/css/country-flags.css
+++ b/gui/slick/css/country-flags.css
@@ -1,508 +1,257 @@
 .country-flag {
     width: 16px;
     height: 11px;
-    background:url(../images/country-flags.png) no-repeat;
+    background:url(../images/country-flags.png) no-repeat
 }
-.country-flag.flag-ad {background-position: -16px 0; -} -.country-flag.flag-ae {background-position: -32px 0; -} -.country-flag.flag-af {background-position: -48px 0; -} -.country-flag.flag-ag {background-position: -64px 0; -} -.country-flag.flag-ai {background-position: -80px 0; -} -.country-flag.flag-al {background-position: -96px 0; -} -.country-flag.flag-am {background-position: -112px 0; -} -.country-flag.flag-an {background-position: -128px 0; -} -.country-flag.flag-ao {background-position: -144px 0; -} -.country-flag.flag-ar {background-position: -160px 0; -} -.country-flag.flag-as {background-position: -176px 0; -} -.country-flag.flag-at {background-position: -192px 0; -} -.country-flag.flag-au {background-position: -208px 0; -} -.country-flag.flag-aw {background-position: -224px 0; -} -.country-flag.flag-az {background-position: -240px 0; -} -.country-flag.flag-ba {background-position: 0 -11px; -} -.country-flag.flag-bb {background-position: -16px -11px; -} -.country-flag.flag-bd {background-position: -32px -11px; -} -.country-flag.flag-be {background-position: -48px -11px; -} -.country-flag.flag-bf {background-position: -64px -11px; -} -.country-flag.flag-bg {background-position: -80px -11px; -} -.country-flag.flag-bh {background-position: -96px -11px; -} -.country-flag.flag-bi {background-position: -112px -11px; -} -.country-flag.flag-bj {background-position: -128px -11px; -} -.country-flag.flag-bm {background-position: -144px -11px; -} -.country-flag.flag-bn {background-position: -160px -11px; -} -.country-flag.flag-bo {background-position: -176px -11px; -} -.country-flag.flag-br {background-position: -192px -11px; -} -.country-flag.flag-bs {background-position: -208px -11px; -} -.country-flag.flag-bt {background-position: -224px -11px; -} -.country-flag.flag-bv {background-position: -240px -11px; -} -.country-flag.flag-bw {background-position: 0 -22px; -} -.country-flag.flag-by {background-position: -16px -22px; -} -.country-flag.flag-bz {background-position: -32px -22px; -} -.country-flag.flag-ca {background-position: -48px -22px; -} -.country-flag.flag-catalonia {background-position: -64px -22px; -} -.country-flag.flag-cd {background-position: -80px -22px; -} -.country-flag.flag-cf {background-position: -96px -22px; -} -.country-flag.flag-cg {background-position: -112px -22px; -} -.country-flag.flag-ch {background-position: -128px -22px; -} -.country-flag.flag-ci {background-position: -144px -22px; -} -.country-flag.flag-ck {background-position: -160px -22px; -} -.country-flag.flag-cl {background-position: -176px -22px; -} -.country-flag.flag-cm {background-position: -192px -22px; -} -.country-flag.flag-cn {background-position: -208px -22px; -} -.country-flag.flag-co {background-position: -224px -22px; -} -.country-flag.flag-cr {background-position: -240px -22px; -} -.country-flag.flag-cu {background-position: 0 -33px; -} -.country-flag.flag-cv {background-position: -16px -33px; -} -.country-flag.flag-cw {background-position: -32px -33px; -} -.country-flag.flag-cy {background-position: -48px -33px; -} -.country-flag.flag-cz {background-position: -64px -33px; -} -.country-flag.flag-de {background-position: -80px -33px; -} -.country-flag.flag-dj {background-position: -96px -33px; -} -.country-flag.flag-dk {background-position: -112px -33px; -} -.country-flag.flag-dm {background-position: -128px -33px; -} -.country-flag.flag-do {background-position: -144px -33px; -} -.country-flag.flag-dz {background-position: -160px -33px; -} -.country-flag.flag-ec {background-position: 
-176px -33px; -} -.country-flag.flag-ee {background-position: -192px -33px; -} -.country-flag.flag-eg {background-position: -208px -33px; -} -.country-flag.flag-eh {background-position: -224px -33px; -} -.country-flag.flag-england {background-position: -240px -33px; -} -.country-flag.flag-er {background-position: 0 -44px; -} -.country-flag.flag-es {background-position: -16px -44px; -} -.country-flag.flag-et {background-position: -32px -44px; -} -.country-flag.flag-eu {background-position: -48px -44px; -} -.country-flag.flag-fi {background-position: -64px -44px; -} -.country-flag.flag-fj {background-position: -80px -44px; -} -.country-flag.flag-fk {background-position: -96px -44px; -} -.country-flag.flag-fm {background-position: -112px -44px; -} -.country-flag.flag-fo {background-position: -128px -44px; -} -.country-flag.flag-fr {background-position: -144px -44px; -} -.country-flag.flag-ga {background-position: -160px -44px; -} -.country-flag.flag-gb {background-position: -176px -44px; -} -.country-flag.flag-gd {background-position: -192px -44px; -} -.country-flag.flag-ge {background-position: -208px -44px; -} -.country-flag.flag-gf {background-position: -224px -44px; -} -.country-flag.flag-gg {background-position: -240px -44px; -} -.country-flag.flag-gh {background-position: 0 -55px; -} -.country-flag.flag-gi {background-position: -16px -55px; -} -.country-flag.flag-gl {background-position: -32px -55px; -} -.country-flag.flag-gm {background-position: -48px -55px; -} -.country-flag.flag-gn {background-position: -64px -55px; -} -.country-flag.flag-gp {background-position: -80px -55px; -} -.country-flag.flag-gq {background-position: -96px -55px; -} -.country-flag.flag-gr {background-position: -112px -55px; -} -.country-flag.flag-gs {background-position: -128px -55px; -} -.country-flag.flag-gt {background-position: -144px -55px; -} -.country-flag.flag-gu {background-position: -160px -55px; -} -.country-flag.flag-gw {background-position: -176px -55px; -} -.country-flag.flag-gy {background-position: -192px -55px; -} -.country-flag.flag-hk {background-position: -208px -55px; -} -.country-flag.flag-hm {background-position: -224px -55px; -} -.country-flag.flag-hn {background-position: -240px -55px; -} -.country-flag.flag-hr {background-position: 0 -66px; -} -.country-flag.flag-ht {background-position: -16px -66px; -} -.country-flag.flag-hu {background-position: -32px -66px; -} -.country-flag.flag-ic {background-position: -48px -66px; -} -.country-flag.flag-id {background-position: -64px -66px; -} -.country-flag.flag-ie {background-position: -80px -66px; -} -.country-flag.flag-il {background-position: -96px -66px; -} -.country-flag.flag-im {background-position: -112px -66px; -} -.country-flag.flag-in {background-position: -128px -66px; -} -.country-flag.flag-io {background-position: -144px -66px; -} -.country-flag.flag-iq {background-position: -160px -66px; -} -.country-flag.flag-ir {background-position: -176px -66px; -} -.country-flag.flag-is {background-position: -192px -66px; -} -.country-flag.flag-it {background-position: -208px -66px; -} -.country-flag.flag-je {background-position: -224px -66px; -} -.country-flag.flag-jm {background-position: -240px -66px; -} -.country-flag.flag-jo {background-position: 0 -77px; -} -.country-flag.flag-jp {background-position: -16px -77px; -} -.country-flag.flag-ke {background-position: -32px -77px; -} -.country-flag.flag-kg {background-position: -48px -77px; -} -.country-flag.flag-kh {background-position: -64px -77px; -} -.country-flag.flag-ki 
{background-position: -80px -77px; -} -.country-flag.flag-km {background-position: -96px -77px; -} -.country-flag.flag-kn {background-position: -112px -77px; -} -.country-flag.flag-kp {background-position: -128px -77px; -} -.country-flag.flag-kr {background-position: -144px -77px; -} -.country-flag.flag-kurdistan {background-position: -160px -77px; -} -.country-flag.flag-kw {background-position: -176px -77px; -} -.country-flag.flag-ky {background-position: -192px -77px; -} -.country-flag.flag-kz {background-position: -208px -77px; -} -.country-flag.flag-la {background-position: -224px -77px; -} -.country-flag.flag-lb {background-position: -240px -77px; -} -.country-flag.flag-lc {background-position: 0 -88px; -} -.country-flag.flag-li {background-position: -16px -88px; -} -.country-flag.flag-lk {background-position: -32px -88px; -} -.country-flag.flag-lr {background-position: -48px -88px; -} -.country-flag.flag-ls {background-position: -64px -88px; -} -.country-flag.flag-lt {background-position: -80px -88px; -} -.country-flag.flag-lu {background-position: -96px -88px; -} -.country-flag.flag-lv {background-position: -112px -88px; -} -.country-flag.flag-ly {background-position: -128px -88px; -} -.country-flag.flag-ma {background-position: -144px -88px; -} -.country-flag.flag-mc {background-position: -160px -88px; -} -.country-flag.flag-md {background-position: -176px -88px; -} -.country-flag.flag-me {background-position: -192px -88px; -} -.country-flag.flag-mg {background-position: -208px -88px; -} -.country-flag.flag-mh {background-position: -224px -88px; -} -.country-flag.flag-mk {background-position: -240px -88px; -} -.country-flag.flag-ml {background-position: 0 -99px; -} -.country-flag.flag-mm {background-position: -16px -99px; -} -.country-flag.flag-mn {background-position: -32px -99px; -} -.country-flag.flag-mo {background-position: -48px -99px; -} -.country-flag.flag-mp {background-position: -64px -99px; -} -.country-flag.flag-mq {background-position: -80px -99px; -} -.country-flag.flag-mr {background-position: -96px -99px; -} -.country-flag.flag-ms {background-position: -112px -99px; -} -.country-flag.flag-mt {background-position: -128px -99px; -} -.country-flag.flag-mu {background-position: -144px -99px; -} -.country-flag.flag-mv {background-position: -160px -99px; -} -.country-flag.flag-mw {background-position: -176px -99px; -} -.country-flag.flag-mx {background-position: -192px -99px; -} -.country-flag.flag-my {background-position: -208px -99px; -} -.country-flag.flag-mz {background-position: -224px -99px; -} -.country-flag.flag-na {background-position: -240px -99px; -} -.country-flag.flag-nc {background-position: 0 -110px; -} -.country-flag.flag-ne {background-position: -16px -110px; -} -.country-flag.flag-nf {background-position: -32px -110px; -} -.country-flag.flag-ng {background-position: -48px -110px; -} -.country-flag.flag-ni {background-position: -64px -110px; -} -.country-flag.flag-nl {background-position: -80px -110px; -} -.country-flag.flag-no {background-position: -96px -110px; -} -.country-flag.flag-np {background-position: -112px -110px; -} -.country-flag.flag-nr {background-position: -128px -110px; -} -.country-flag.flag-nu {background-position: -144px -110px; -} -.country-flag.flag-nz {background-position: -160px -110px; -} -.country-flag.flag-om {background-position: -176px -110px; -} -.country-flag.flag-pa {background-position: -192px -110px; -} -.country-flag.flag-pe {background-position: -208px -110px; -} -.country-flag.flag-pf {background-position: -224px 
-110px; -} -.country-flag.flag-pg {background-position: -240px -110px; -} -.country-flag.flag-ph {background-position: 0 -121px; -} -.country-flag.flag-pk {background-position: -16px -121px; -} -.country-flag.flag-pl {background-position: -32px -121px; -} -.country-flag.flag-pm {background-position: -48px -121px; -} -.country-flag.flag-pn {background-position: -64px -121px; -} -.country-flag.flag-pr {background-position: -80px -121px; -} -.country-flag.flag-ps {background-position: -96px -121px; -} -.country-flag.flag-pt {background-position: -112px -121px; -} -.country-flag.flag-pw {background-position: -128px -121px; -} -.country-flag.flag-py {background-position: -144px -121px; -} -.country-flag.flag-qa {background-position: -160px -121px; -} -.country-flag.flag-re {background-position: -176px -121px; -} -.country-flag.flag-ro {background-position: -192px -121px; -} -.country-flag.flag-rs {background-position: -208px -121px; -} -.country-flag.flag-ru {background-position: -224px -121px; -} -.country-flag.flag-rw {background-position: -240px -121px; -} -.country-flag.flag-sa {background-position: 0 -132px; -} -.country-flag.flag-sb {background-position: -16px -132px; -} -.country-flag.flag-sc {background-position: -32px -132px; -} -.country-flag.flag-scotland {background-position: -48px -132px; -} -.country-flag.flag-sd {background-position: -64px -132px; -} -.country-flag.flag-se {background-position: -80px -132px; -} -.country-flag.flag-sg {background-position: -96px -132px; -} -.country-flag.flag-sh {background-position: -112px -132px; -} -.country-flag.flag-si {background-position: -128px -132px; -} -.country-flag.flag-sk {background-position: -144px -132px; -} -.country-flag.flag-sl {background-position: -160px -132px; -} -.country-flag.flag-sm {background-position: -176px -132px; -} -.country-flag.flag-sn {background-position: -192px -132px; -} -.country-flag.flag-so {background-position: -208px -132px; -} -.country-flag.flag-somaliland {background-position: -224px -132px; -} -.country-flag.flag-sr {background-position: -240px -132px; -} -.country-flag.flag-ss {background-position: 0 -143px; -} -.country-flag.flag-st {background-position: -16px -143px; -} -.country-flag.flag-sv {background-position: -32px -143px; -} -.country-flag.flag-sx {background-position: -48px -143px; -} -.country-flag.flag-sy {background-position: -64px -143px; -} -.country-flag.flag-sz {background-position: -80px -143px; -} -.country-flag.flag-tc {background-position: -96px -143px; -} -.country-flag.flag-td {background-position: -112px -143px; -} -.country-flag.flag-tf {background-position: -128px -143px; -} -.country-flag.flag-tg {background-position: -144px -143px; -} -.country-flag.flag-th {background-position: -160px -143px; -} -.country-flag.flag-tj {background-position: -176px -143px; -} -.country-flag.flag-tk {background-position: -192px -143px; -} -.country-flag.flag-tl {background-position: -208px -143px; -} -.country-flag.flag-tm {background-position: -224px -143px; -} -.country-flag.flag-tn {background-position: -240px -143px; -} -.country-flag.flag-to {background-position: 0 -154px; -} -.country-flag.flag-tr {background-position: -16px -154px; -} -.country-flag.flag-tt {background-position: -32px -154px; -} -.country-flag.flag-tv {background-position: -48px -154px; -} -.country-flag.flag-tw {background-position: -64px -154px; -} -.country-flag.flag-tz {background-position: -80px -154px; -} -.country-flag.flag-ua {background-position: -96px -154px; -} -.country-flag.flag-ug {background-position: 
-112px -154px; -} -.country-flag.flag-um {background-position: -128px -154px; -} -.country-flag.flag-us {background-position: -144px -154px; -} -.country-flag.flag-uy {background-position: -160px -154px; -} -.country-flag.flag-uz {background-position: -176px -154px; -} -.country-flag.flag-va {background-position: -192px -154px; -} -.country-flag.flag-vc {background-position: -208px -154px; -} -.country-flag.flag-ve {background-position: -224px -154px; -} -.country-flag.flag-vg {background-position: -240px -154px; -} -.country-flag.flag-vi {background-position: 0 -165px; -} -.country-flag.flag-vn {background-position: -16px -165px; -} -.country-flag.flag-vu {background-position: -32px -165px; -} -.country-flag.flag-wales {background-position: -48px -165px; -} -.country-flag.flag-wf {background-position: -64px -165px; -} -.country-flag.flag-ws {background-position: -80px -165px; -} -.country-flag.flag-ye {background-position: -96px -165px; -} -.country-flag.flag-yt {background-position: -112px -165px; -} -.country-flag.flag-za {background-position: -128px -165px; -} -.country-flag.flag-zanzibar {background-position: -144px -165px; -} -.country-flag.flag-zm {background-position: -160px -165px; -} -.country-flag.flag-zw {background-position: -176px -165px; -} +.country-flag.flag-ad {background-position: -16px 0} +.country-flag.flag-ae {background-position: -32px 0} +.country-flag.flag-af {background-position: -48px 0} +.country-flag.flag-ag {background-position: -64px 0} +.country-flag.flag-ai {background-position: -80px 0} +.country-flag.flag-al {background-position: -96px 0} +.country-flag.flag-am {background-position: -112px 0} +.country-flag.flag-an {background-position: -128px 0} +.country-flag.flag-ao {background-position: -144px 0} +.country-flag.flag-ar {background-position: -160px 0} +.country-flag.flag-as {background-position: -176px 0} +.country-flag.flag-at {background-position: -192px 0} +.country-flag.flag-au {background-position: -208px 0} +.country-flag.flag-aw {background-position: -224px 0} +.country-flag.flag-az {background-position: -240px 0} +.country-flag.flag-ba {background-position: 0 -11px} +.country-flag.flag-bb {background-position: -16px -11px} +.country-flag.flag-bd {background-position: -32px -11px} +.country-flag.flag-be {background-position: -48px -11px} +.country-flag.flag-bf {background-position: -64px -11px} +.country-flag.flag-bg {background-position: -80px -11px} +.country-flag.flag-bh {background-position: -96px -11px} +.country-flag.flag-bi {background-position: -112px -11px} +.country-flag.flag-bj {background-position: -128px -11px} +.country-flag.flag-bm {background-position: -144px -11px} +.country-flag.flag-bn {background-position: -160px -11px} +.country-flag.flag-bo {background-position: -176px -11px} +.country-flag.flag-br {background-position: -192px -11px} +.country-flag.flag-bs {background-position: -208px -11px} +.country-flag.flag-bt {background-position: -224px -11px} +.country-flag.flag-bv {background-position: -240px -11px} +.country-flag.flag-bw {background-position: 0 -22px} +.country-flag.flag-by {background-position: -16px -22px} +.country-flag.flag-bz {background-position: -32px -22px} +.country-flag.flag-ca {background-position: -48px -22px} +.country-flag.flag-catalonia {background-position: -64px -22px} +.country-flag.flag-cd {background-position: -80px -22px} +.country-flag.flag-cf {background-position: -96px -22px} +.country-flag.flag-cg {background-position: -112px -22px} +.country-flag.flag-ch {background-position: -128px -22px} 
+.country-flag.flag-ci {background-position: -144px -22px} +.country-flag.flag-ck {background-position: -160px -22px} +.country-flag.flag-cl {background-position: -176px -22px} +.country-flag.flag-cm {background-position: -192px -22px} +.country-flag.flag-cn {background-position: -208px -22px} +.country-flag.flag-co {background-position: -224px -22px} +.country-flag.flag-cr {background-position: -240px -22px} +.country-flag.flag-cu {background-position: 0 -33px} +.country-flag.flag-cv {background-position: -16px -33px} +.country-flag.flag-cw {background-position: -32px -33px} +.country-flag.flag-cy {background-position: -48px -33px} +.country-flag.flag-cz {background-position: -64px -33px} +.country-flag.flag-de {background-position: -80px -33px} +.country-flag.flag-dj {background-position: -96px -33px} +.country-flag.flag-dk {background-position: -112px -33px} +.country-flag.flag-dm {background-position: -128px -33px} +.country-flag.flag-do {background-position: -144px -33px} +.country-flag.flag-dz {background-position: -160px -33px} +.country-flag.flag-ec {background-position: -176px -33px} +.country-flag.flag-ee {background-position: -192px -33px} +.country-flag.flag-eg {background-position: -208px -33px} +.country-flag.flag-eh {background-position: -224px -33px} +.country-flag.flag-england {background-position: -240px -33px} +.country-flag.flag-er {background-position: 0 -44px} +.country-flag.flag-es {background-position: -16px -44px} +.country-flag.flag-et {background-position: -32px -44px} +.country-flag.flag-eu {background-position: -48px -44px} +.country-flag.flag-fi {background-position: -64px -44px} +.country-flag.flag-fj {background-position: -80px -44px} +.country-flag.flag-fk {background-position: -96px -44px} +.country-flag.flag-fm {background-position: -112px -44px} +.country-flag.flag-fo {background-position: -128px -44px} +.country-flag.flag-fr {background-position: -144px -44px} +.country-flag.flag-ga {background-position: -160px -44px} +.country-flag.flag-gb {background-position: -176px -44px} +.country-flag.flag-gd {background-position: -192px -44px} +.country-flag.flag-ge {background-position: -208px -44px} +.country-flag.flag-gf {background-position: -224px -44px} +.country-flag.flag-gg {background-position: -240px -44px} +.country-flag.flag-gh {background-position: 0 -55px} +.country-flag.flag-gi {background-position: -16px -55px} +.country-flag.flag-gl {background-position: -32px -55px} +.country-flag.flag-gm {background-position: -48px -55px} +.country-flag.flag-gn {background-position: -64px -55px} +.country-flag.flag-gp {background-position: -80px -55px} +.country-flag.flag-gq {background-position: -96px -55px} +.country-flag.flag-gr {background-position: -112px -55px} +.country-flag.flag-gs {background-position: -128px -55px} +.country-flag.flag-gt {background-position: -144px -55px} +.country-flag.flag-gu {background-position: -160px -55px} +.country-flag.flag-gw {background-position: -176px -55px} +.country-flag.flag-gy {background-position: -192px -55px} +.country-flag.flag-hk {background-position: -208px -55px} +.country-flag.flag-hm {background-position: -224px -55px} +.country-flag.flag-hn {background-position: -240px -55px} +.country-flag.flag-hr {background-position: 0 -66px} +.country-flag.flag-ht {background-position: -16px -66px} +.country-flag.flag-hu {background-position: -32px -66px} +.country-flag.flag-ic {background-position: -48px -66px} +.country-flag.flag-id {background-position: -64px -66px} +.country-flag.flag-ie {background-position: -80px 
-66px} +.country-flag.flag-il {background-position: -96px -66px} +.country-flag.flag-im {background-position: -112px -66px} +.country-flag.flag-in {background-position: -128px -66px} +.country-flag.flag-io {background-position: -144px -66px} +.country-flag.flag-iq {background-position: -160px -66px} +.country-flag.flag-ir {background-position: -176px -66px} +.country-flag.flag-is {background-position: -192px -66px} +.country-flag.flag-it {background-position: -208px -66px} +.country-flag.flag-je {background-position: -224px -66px} +.country-flag.flag-jm {background-position: -240px -66px} +.country-flag.flag-jo {background-position: 0 -77px} +.country-flag.flag-jp {background-position: -16px -77px} +.country-flag.flag-ke {background-position: -32px -77px} +.country-flag.flag-kg {background-position: -48px -77px} +.country-flag.flag-kh {background-position: -64px -77px} +.country-flag.flag-ki {background-position: -80px -77px} +.country-flag.flag-km {background-position: -96px -77px} +.country-flag.flag-kn {background-position: -112px -77px} +.country-flag.flag-kp {background-position: -128px -77px} +.country-flag.flag-kr {background-position: -144px -77px} +.country-flag.flag-kurdistan {background-position: -160px -77px} +.country-flag.flag-kw {background-position: -176px -77px} +.country-flag.flag-ky {background-position: -192px -77px} +.country-flag.flag-kz {background-position: -208px -77px} +.country-flag.flag-la {background-position: -224px -77px} +.country-flag.flag-lb {background-position: -240px -77px} +.country-flag.flag-lc {background-position: 0 -88px} +.country-flag.flag-li {background-position: -16px -88px} +.country-flag.flag-lk {background-position: -32px -88px} +.country-flag.flag-lr {background-position: -48px -88px} +.country-flag.flag-ls {background-position: -64px -88px} +.country-flag.flag-lt {background-position: -80px -88px} +.country-flag.flag-lu {background-position: -96px -88px} +.country-flag.flag-lv {background-position: -112px -88px} +.country-flag.flag-ly {background-position: -128px -88px} +.country-flag.flag-ma {background-position: -144px -88px} +.country-flag.flag-mc {background-position: -160px -88px} +.country-flag.flag-md {background-position: -176px -88px} +.country-flag.flag-me {background-position: -192px -88px} +.country-flag.flag-mg {background-position: -208px -88px} +.country-flag.flag-mh {background-position: -224px -88px} +.country-flag.flag-mk {background-position: -240px -88px} +.country-flag.flag-ml {background-position: 0 -99px} +.country-flag.flag-mm {background-position: -16px -99px} +.country-flag.flag-mn {background-position: -32px -99px} +.country-flag.flag-mo {background-position: -48px -99px} +.country-flag.flag-mp {background-position: -64px -99px} +.country-flag.flag-mq {background-position: -80px -99px} +.country-flag.flag-mr {background-position: -96px -99px} +.country-flag.flag-ms {background-position: -112px -99px} +.country-flag.flag-mt {background-position: -128px -99px} +.country-flag.flag-mu {background-position: -144px -99px} +.country-flag.flag-mv {background-position: -160px -99px} +.country-flag.flag-mw {background-position: -176px -99px} +.country-flag.flag-mx {background-position: -192px -99px} +.country-flag.flag-my {background-position: -208px -99px} +.country-flag.flag-mz {background-position: -224px -99px} +.country-flag.flag-na {background-position: -240px -99px} +.country-flag.flag-nc {background-position: 0 -110px} +.country-flag.flag-ne {background-position: -16px -110px} +.country-flag.flag-nf 
{background-position: -32px -110px} +.country-flag.flag-ng {background-position: -48px -110px} +.country-flag.flag-ni {background-position: -64px -110px} +.country-flag.flag-nl {background-position: -80px -110px} +.country-flag.flag-no {background-position: -96px -110px} +.country-flag.flag-np {background-position: -112px -110px} +.country-flag.flag-nr {background-position: -128px -110px} +.country-flag.flag-nu {background-position: -144px -110px} +.country-flag.flag-nz {background-position: -160px -110px} +.country-flag.flag-om {background-position: -176px -110px} +.country-flag.flag-pa {background-position: -192px -110px} +.country-flag.flag-pe {background-position: -208px -110px} +.country-flag.flag-pf {background-position: -224px -110px} +.country-flag.flag-pg {background-position: -240px -110px} +.country-flag.flag-ph {background-position: 0 -121px} +.country-flag.flag-pk {background-position: -16px -121px} +.country-flag.flag-pl {background-position: -32px -121px} +.country-flag.flag-pm {background-position: -48px -121px} +.country-flag.flag-pn {background-position: -64px -121px} +.country-flag.flag-pr {background-position: -80px -121px} +.country-flag.flag-ps {background-position: -96px -121px} +.country-flag.flag-pt {background-position: -112px -121px} +.country-flag.flag-pw {background-position: -128px -121px} +.country-flag.flag-py {background-position: -144px -121px} +.country-flag.flag-qa {background-position: -160px -121px} +.country-flag.flag-re {background-position: -176px -121px} +.country-flag.flag-ro {background-position: -192px -121px} +.country-flag.flag-rs {background-position: -208px -121px} +.country-flag.flag-ru {background-position: -224px -121px} +.country-flag.flag-rw {background-position: -240px -121px} +.country-flag.flag-sa {background-position: 0 -132px} +.country-flag.flag-sb {background-position: -16px -132px} +.country-flag.flag-sc {background-position: -32px -132px} +.country-flag.flag-scotland {background-position: -48px -132px} +.country-flag.flag-sd {background-position: -64px -132px} +.country-flag.flag-se {background-position: -80px -132px} +.country-flag.flag-sg {background-position: -96px -132px} +.country-flag.flag-sh {background-position: -112px -132px} +.country-flag.flag-si {background-position: -128px -132px} +.country-flag.flag-sk {background-position: -144px -132px} +.country-flag.flag-sl {background-position: -160px -132px} +.country-flag.flag-sm {background-position: -176px -132px} +.country-flag.flag-sn {background-position: -192px -132px} +.country-flag.flag-so {background-position: -208px -132px} +.country-flag.flag-somaliland {background-position: -224px -132px} +.country-flag.flag-sr {background-position: -240px -132px} +.country-flag.flag-ss {background-position: 0 -143px} +.country-flag.flag-st {background-position: -16px -143px} +.country-flag.flag-sv {background-position: -32px -143px} +.country-flag.flag-sx {background-position: -48px -143px} +.country-flag.flag-sy {background-position: -64px -143px} +.country-flag.flag-sz {background-position: -80px -143px} +.country-flag.flag-tc {background-position: -96px -143px} +.country-flag.flag-td {background-position: -112px -143px} +.country-flag.flag-tf {background-position: -128px -143px} +.country-flag.flag-tg {background-position: -144px -143px} +.country-flag.flag-th {background-position: -160px -143px} +.country-flag.flag-tj {background-position: -176px -143px} +.country-flag.flag-tk {background-position: -192px -143px} +.country-flag.flag-tl {background-position: -208px -143px} 
+.country-flag.flag-tm {background-position: -224px -143px} +.country-flag.flag-tn {background-position: -240px -143px} +.country-flag.flag-to {background-position: 0 -154px} +.country-flag.flag-tr {background-position: -16px -154px} +.country-flag.flag-tt {background-position: -32px -154px} +.country-flag.flag-tv {background-position: -48px -154px} +.country-flag.flag-tw {background-position: -64px -154px} +.country-flag.flag-tz {background-position: -80px -154px} +.country-flag.flag-ua {background-position: -96px -154px} +.country-flag.flag-ug {background-position: -112px -154px} +.country-flag.flag-um {background-position: -128px -154px} +.country-flag.flag-us {background-position: -144px -154px} +.country-flag.flag-uy {background-position: -160px -154px} +.country-flag.flag-uz {background-position: -176px -154px} +.country-flag.flag-va {background-position: -192px -154px} +.country-flag.flag-vc {background-position: -208px -154px} +.country-flag.flag-ve {background-position: -224px -154px} +.country-flag.flag-vg {background-position: -240px -154px} +.country-flag.flag-vi {background-position: 0 -165px} +.country-flag.flag-vn {background-position: -16px -165px} +.country-flag.flag-vu {background-position: -32px -165px} +.country-flag.flag-wales {background-position: -48px -165px} +.country-flag.flag-wf {background-position: -64px -165px} +.country-flag.flag-ws {background-position: -80px -165px} +.country-flag.flag-ye {background-position: -96px -165px} +.country-flag.flag-yt {background-position: -112px -165px} +.country-flag.flag-za {background-position: -128px -165px} +.country-flag.flag-zanzibar {background-position: -144px -165px} +.country-flag.flag-zm {background-position: -160px -165px} +.country-flag.flag-zw {background-position: -176px -165px} diff --git a/gui/slick/css/dark.css b/gui/slick/css/dark.css index 402920c417..3036a9bd53 100644 --- a/gui/slick/css/dark.css +++ b/gui/slick/css/dark.css @@ -4,34 +4,34 @@ inc_top.mako .ui-dialog, .ui-dialog-buttonpane { - background: rgb(42, 42, 42) !important; + background: #2a2a2a !important; } .ui-widget-content { - background: rgb(96, 96, 96); - border: 1px solid rgb(17, 17, 17); - color: rgb(255, 255, 255); + background: #606060; + border: 1px solid #111; + color: #fff; } .ui-widget-content a { - color: rgb(255, 255, 255); + color: #fff; } .ui-widget-content a:hover { - color: rgb(9, 162, 255); + color: #09A2FF; text-decoration: none; } .ui-widget-header { - background: rgb(61, 61, 61); - border: 1px solid rgb(17, 17, 17); - color: rgb(255, 255, 255); + background: #3d3d3d; + border: 1px solid #111; + color: #fff; } .ui-state-default, .ui-widget-content .ui-state-default, .ui-widget-header .ui-state-default { - border: 1px solid rgb(17, 17, 17); + border: 1px solid #111; } .ui-state-hover, @@ -40,13 +40,13 @@ inc_top.mako .ui-state-focus, .ui-widget-content .ui-state-focus, .ui-widget-header .ui-state-focus { - background: rgb(61, 61, 61); + background: #3d3d3d; } .ui-state-active, .ui-widget-content .ui-state-active, .ui-widget-header .ui-state-active { - background: rgb(61, 61, 61); + background: #3d3d3d; } .ui-icon, @@ -59,31 +59,31 @@ inc_top.mako } .ui-widget-overlay { - background: rgb(0, 0, 0) url("../css/lib/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat-x; + background: #000000 url("../css/lib/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat-x; } .ui-state-default a, .ui-state-default a:link, .ui-state-default a:visited { - color: rgb(255, 255, 255); + color: #fff; text-decoration: none; } .ui-dialog 
.ui-dialog-titlebar-close { - background: rgb(51, 51, 51); + background: #333; } .ui-tabs { - padding: 0; + padding: 0px; background: none; - border-width: 0; + border-width: 0px; } .ui-tabs .ui-tabs-panel { - background-color: rgb(61, 61, 61) !important; - border: 1px solid rgb(17, 17, 17) !important; + background-color: #3d3d3d !important; + border: 1px solid #111 !important; } .ui-tabs-nav > :not(.ui-tabs-active){ - background: rgb(51, 51, 51); + background: #333; border-top-left-radius: 5px; border-top-right-radius: 5px; } @@ -95,13 +95,13 @@ inc_bottom.mako .footer { width: 100%; padding: 20px 0; - color: rgb(255, 255, 255); + color: #fff; text-align: center; font-size: 12px; } .footerhighlight { - color: rgb(9, 162, 255); + color: #09A2FF; display: inline; } @@ -110,9 +110,10 @@ home.mako ========================================================================== */ .imgbanner .banner { - border: 1px solid rgb(17, 17, 17); + border: 1px solid #111; overflow: hidden; height: 66px; + overflow: hidden; border-radius: 8px; vertical-align: top; width: 360px; @@ -127,7 +128,7 @@ home.mako border-radius: 3px; vertical-align: middle; width: 45px; - border: 1px solid rgb(17, 17, 17); + border: 1px solid #111; margin-right: 5px; } @@ -138,10 +139,10 @@ home.mako height: 100%; overflow: visible; text-align: center; - text-shadow: 0 0 0.1em rgb(0, 0, 0); + text-shadow: 0 0 0.1em #000; vertical-align: middle; font-size: 12px; - color: rgb(255, 255, 255); + color: #fff; } .loading-spinner { @@ -149,8 +150,8 @@ home.mako } .show-container { - background-color: rgb(51, 51, 51); - border-color: rgb(51, 51, 51); + background-color: #333; + border-color: #333; } .show-title:after { @@ -184,26 +185,26 @@ home.mako } td.tvShow a { - color: rgb(255, 255, 255); + color: #fff; text-decoration: none; } td.tvShow a:hover { cursor: pointer; - color: rgb(9, 162, 255); + color: #09A2FF; } .popover { margin-left: -50px; - background-color: rgb(51, 51, 51); + background-color: #333; } .popover-content { - background-color: rgb(51, 51, 51); + background-color: #333; } .popover.bottom .arrow:after { - border-bottom-color: rgb(51, 51, 51); + border-bottom-color: #333; } /* ======================================================================= @@ -219,8 +220,8 @@ home_newShow.mako padding: 8px; overflow: hidden; font-size: 14px; - background-color: rgb(61, 61, 61); - border: 1px solid rgb(17, 17, 17); + background-color: #3d3d3d; + border: 1px solid #111; } /* ======================================================================= @@ -232,7 +233,7 @@ ul#rootDirStaticList li { margin: 2px; list-style: none outside none; cursor: pointer; - background: rgb(61, 61, 61); + background: #3d3d3d; } /* ======================================================================= @@ -242,8 +243,8 @@ home_trendingShows.mako .traktContainer { margin: 12px; width: 188px; - background-color: rgb(51, 51, 51); - border: 1px solid rgb(17, 17, 17); + background-color: #333; + border: 1px solid #111; border-radius: 6px; } @@ -264,24 +265,24 @@ h1.title { line-height: 30px; text-align: left; text-rendering: optimizelegibility; - border-bottom: 1px solid rgb(85, 85, 85); + border-bottom: 1px solid #555; } ul.tags li { margin-right: 4px; margin-bottom: 5px; padding: 3px 4px 3px 25px; - background: url(../images/tag.png) no-repeat scroll 5px 4px rgb(21, 82, 143); + background: url(../images/tag.png) no-repeat scroll 5px 4px #15528F; border-radius: 3px; - border: 1px solid rgb(17, 17, 17); - color: rgb(255, 255, 255); + border: 1px solid #111; + 
color: #FFF; font: 14px/18px "Open Sans", "Helvetica Neue", Helvetica, Arial, Geneva, sans-serif; - text-shadow: 0 1px rgba(0, 0, 0, 0.8); + text-shadow: 0px 1px rgba(0, 0, 0, 0.8); float: left; } .tvshowImg { - border: 1px solid rgb(17, 17, 17); + border: 1px solid #111; border-radius: 5px; height: 311px; width: auto; @@ -290,8 +291,8 @@ ul.tags li { #summary { padding: 10px; - background-color: rgb(61, 61, 61); - border: 1px solid rgb(17, 17, 17); + background-color: #3d3d3d; + border: 1px solid #111; width: 100%; height: 250px; overflow: auto; @@ -306,27 +307,27 @@ ul.tags li { text-align: center; border: none; empty-cells: show; - color: rgb(0, 0, 0); + color: #000; } .sickbeardTable th{ - color: rgb(255, 255, 255); + color: #fff; text-align: center; - background-color: rgb(21, 82, 143); + background-color: #15528F; white-space: nowrap; } .sickbeardTable th, .sickbeardTable td { - border-top: 1px solid rgb(34, 34, 34); - border-left: 1px solid rgb(34, 34, 34); + border-top: 1px solid #222; + border-left: 1px solid #222; padding: 4px; } th.row-seasonheader { border: none; - background-color: rgb(34, 34, 34); - color: rgb(255, 255, 255); + background-color: #222; + color: #fff; padding-top: 15px; text-align: left; } @@ -355,54 +356,54 @@ h2.day, h2.network { line-height: 36px; font-weight: bold; letter-spacing: 1px; - color: rgb(255, 255, 255); + color: #FFF; text-align: center; - text-shadow: -1px -1px 0 rgba(0, 0, 0, 0.3); - background-color: rgb(21, 82, 143); + text-shadow: -1px -1px 0px rgba(0, 0, 0, 0.3); + background-color: #15528F; } .tvshowDiv { display: block; clear: both; - border: 1px solid rgb(204, 204, 204); + border: 1px solid #ccc; margin: auto; - padding: 0; + padding: 0px; text-align: left; width: 750px; border-radius: 5px; - background: rgb(255, 255, 255); + background: #fff; cursor: default; overflow: hidden; - color: rgb(0, 0, 0); + color: #000; } .tvshowDiv a:hover { - color: rgb(9, 162, 255); + color: #09A2FF; } #showListTable td.tvShow a { - color: rgb(0, 0, 0); + color: #000; } #showListTable td.tvShow a:hover { cursor: pointer; - color: rgb(9, 162, 255); + color: #09A2FF; } table.cal-odd { - background-color: rgb(51, 51, 51); + background-color: #333; } table.cal-even { - background-color: rgb(61, 61, 61); + background-color: #3d3d3d; } .calendarShow .text .airtime { - color: rgb(255, 255, 255); + color:#fff } .calendarShow .text .episode-title { - color: rgb(170, 170, 170); + color:#aaa } /* ======================================================================= @@ -411,14 +412,14 @@ config*.mako .component-group { padding: 15px 15px 25px; - border-bottom: 1px dotted rgb(85, 85, 85); + border-bottom: 1px dotted #555; min-height: 200px; } .component-group-desc p { width: 90%; margin: 10px 0; - color: rgb(221, 221, 221); + color: #ddd; } #provider_order_list li, @@ -426,16 +427,16 @@ config*.mako padding: 5px; margin: 5px 0; font-size: 14px; - background: rgb(51, 51, 51) !important; - color: rgb(255, 255, 255); + background: #333 !important; + color: #fff; } #provider_order_list .ui-state-default.torrent-provider { - background-color: rgb(85, 85, 85) !important; + background-color: #555 !important; } #provider_order_list .ui-state-default.nzb-provider { - background-color: rgb(34, 34, 34) !important; + background-color: #222 !important; } /* ======================================================================= @@ -444,16 +445,16 @@ config_postProcessing.mako #config div.example { padding: 10px; - background-color: rgb(51, 51, 51); - border: 1px solid rgb(17, 17, 
17); + background-color: #333333; + border: 1px solid #111; } .Key { width: 100%; padding: 6px; font-size: 13px; - background-color: rgb(61, 61, 61); - border: 1px solid rgb(17, 17, 17); + background-color: #3d3d3d; + border: 1px solid #111; border-collapse: collapse; border-spacing: 0; } @@ -461,17 +462,17 @@ config_postProcessing.mako .Key th, .tableHeader { padding: 3px 9px; margin: 0; - color: rgb(255, 255, 255); + color: #fff; text-align: center; - background: rgb(21, 82, 143); + background: #15528F; } .Key tr { - border-bottom: 1px solid rgb(17, 17, 17); + border-bottom: 1px solid #111; } .Key tr.even { - background-color: rgb(51, 51, 51); + background-color: #333; } /* ======================================================================= @@ -481,14 +482,14 @@ config_notifications.mako div.metadata_options { padding: 7px; overflow: auto; - background: rgb(51, 51, 51); - color: rgb(255, 255, 255); - border: 1px solid rgb(17, 17, 17); + background: #333; + color: #fff; + border: 1px solid #111; } div.metadata_options label:hover { - color: rgb(255, 255, 255); - background-color: rgb(21, 82, 143); + color: #fff; + background-color: #15528F; cursor: pointer; } @@ -496,13 +497,13 @@ div.metadata_options label { display: block; padding-left: 7px; line-height: 20px; - color: rgb(255, 255, 255); + color: #fff; } div.metadata_example label { display: block; line-height: 21px; - color: rgb(255, 255, 255); + color: #fff; cursor: pointer; } @@ -512,7 +513,7 @@ manage*.mako .separator { font-size: 90%; - color: rgb(255, 255, 255); + color: #fff; } a.whitelink { @@ -524,8 +525,8 @@ Global span.path { padding: 3px 6px; - color: rgb(9, 162, 255); - background-color: rgb(51, 51, 51); + color: #09A2FF; + background-color: #333; } /* ======================================================================= @@ -536,71 +537,71 @@ body { padding-top: 60px; overflow-y: scroll; font-family: "Open Sans", "Helvetica Neue", Helvetica, Arial, sans-serif; - color: rgb(255, 255, 255); - background-color: rgb(34, 34, 34); + color: #fff; + background-color: #222; } /* navbar styling */ .navbar-default { - background-color: rgb(21, 82, 143); + background-color: #15528F; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#297AB8', endColorstr='#15528F'); - background: -webkit-gradient(linear, left top, left bottom, from(rgb(41, 122, 184)), to(rgb(21, 82, 143))); - background: -moz-linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); - border-color: rgb(62, 63, 58); + background: -webkit-gradient(linear, left top, left bottom, from(#297AB8), to(#15528F)); + background: -moz-linear-gradient(top, #297AB8, #15528F); + border-color: #3e3f3a; } .navbar-default .navbar-nav > li > a:hover, .navbar-default .navbar-nav > li > a:focus { - background-color: rgb(18, 68, 119); + background-color: #124477; } .navbar-default .navbar-nav > .active > a, .navbar-default .navbar-nav > .active > a:hover, .navbar-default .navbar-nav > .active > a:focus { - color: rgb(255, 255, 255); - background-color: rgb(18, 68, 119); + color: #ffffff; + background-color: #124477; } .navbar-default .navbar-toggle:hover, .navbar-default .navbar-toggle:focus { - background-color: rgb(18, 68, 119); + background-color: #124477; } .navbar-default .navbar-toggle .icon-bar { - background-color: rgb(18, 68, 119); + background-color: #124477; } .navbar-default .navbar-nav > .open > a, .navbar-default .navbar-nav > .open > a:hover, .navbar-default .navbar-nav > .open > a:focus { - background-color: rgb(18, 68, 119); - color: rgb(255, 255, 
255); + background-color: #124477; + color: #ffffff; } @media (max-width: 767px) { .navbar-default .navbar-nav .open .dropdown-menu > .active > a, .navbar-default .navbar-nav .open .dropdown-menu > .active > a:hover, .navbar-default .navbar-nav .open .dropdown-menu > .active > a:focus { - color: rgb(255, 255, 255); - background-color: rgb(18, 68, 119); + color: #ffffff; + background-color: #124477; } } .dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus { - color: rgb(255, 255, 255); + color: #fff; text-decoration: none; - background-color: rgb(21, 82, 143); + background-color: #15528F; } .dropdown-menu > li > a { padding: 4px 36px 4px 20px; - color: rgb(255, 255, 255); + color: #fff; } .dropdown-menu { - background-color: rgb(51, 51, 51); + background-color: #333; border: 1px solid rgba(0, 0, 0, 0.15); - box-shadow: 0 6px 12px rgba(0, 0, 0, 0.176); + box-shadow: 0px 6px 12px rgba(0, 0, 0, 0.176); } .btn { @@ -612,25 +613,25 @@ body { font-size: 12px; line-height: 16px; *line-height: 20px; - color: rgb(255, 255, 255); + color: #fff; text-align: center; text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); vertical-align: middle; cursor: pointer; - background-color: rgb(38, 114, 182); - *background-color: rgb(38, 114, 182); - background-image: -ms-linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(41, 122, 184)), to(rgb(21, 82, 143))); - background-image: -webkit-linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); - background-image: -o-linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); - background-image: linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); - background-image: -moz-linear-gradient(top, rgb(41, 122, 184), rgb(21, 82, 143)); + background-color: #2672B6; + *background-color: #2672B6; + background-image: -ms-linear-gradient(top, #297AB8, #15528F); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#297AB8), to(#15528F)); + background-image: -webkit-linear-gradient(top, #297AB8, #15528F); + background-image: -o-linear-gradient(top, #297AB8, #15528F); + background-image: linear-gradient(top, #297AB8, #15528F); + background-image: -moz-linear-gradient(top, #297AB8, #15528F); background-repeat: repeat-x; - border: 1px solid rgb(17, 17, 17); + border: 1px solid #111; *border: 0; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); - border-color: rgb(17, 17, 17) rgb(17, 17, 17) rgb(17, 17, 17); - border-bottom-color: rgb(17, 17, 17); + border-color: #111 #111 #111; + border-bottom-color: #111; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; @@ -647,22 +648,22 @@ body { .btn.active, .btn.disabled, .btn[disabled] { - background-color: rgb(38, 114, 182); - *background-color: rgb(38, 114, 182); - color: rgb(255, 255, 255); + background-color: #2672B6; + *background-color: #2672B6; + color: #fff; } .btn:active, .btn.active { - background-color: rgb(204, 204, 204) \9; - color: rgb(255, 255, 255); + background-color: #cccccc \9; + color: #fff; } .btn:hover { - color: rgb(255, 255, 255); + color: #fff; text-decoration: none; - background-color: rgb(38, 114, 182); - *background-color: rgb(38, 114, 182); + background-color: #2672B6; + *background-color: #2672B6; background-position: 0 -150px; -webkit-transition: background-position 0.0s linear; -moz-transition: background-position 0.0s linear; @@ -672,18 +673,18 @@ body { } .btn:focus { - outline: thin dotted rgb(51, 51, 51); + outline: thin dotted #333; outline: 5px auto 
-webkit-focus-ring-color; outline-offset: -2px; - color: rgb(255, 255, 255); + color: #fff; } .btn.active, .btn:active { - background-color: rgb(38, 114, 182); - background-color: rgb(38, 114, 182) \9; + background-color: #2672B6; + background-color: #2672B6 \9; background-image: none; - color: rgb(255, 255, 255); + color: #fff; outline: 0; -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); @@ -693,7 +694,7 @@ body { .btn.disabled, .btn[disabled] { cursor: default; - background-color: rgb(21, 82, 143); + background-color: #15528F; background-image: none; opacity: 0.65; filter: alpha(opacity=65); @@ -709,16 +710,16 @@ body { right: 12px; display: inline-block; border-right: 6px solid transparent; - border-bottom: 6px solid rgb(51, 51, 51); + border-bottom: 6px solid #333; border-left: 6px solid transparent; content: ""; } } pre { - color: rgb(255, 255, 255); - background-color: rgb(61, 61, 61); - border-color: rgb(17, 17, 17); + color: #fff; + background-color: #3d3d3d; + border-color: #111; } /* ======================================================================= @@ -729,11 +730,11 @@ browser.css overrides margin: 2px 0; list-style-type: none; cursor: pointer; - background: rgb(51, 51, 51) !important; + background: #333 !important; } .fileBrowserDialog ul li a:hover { - color: rgb(9, 162, 255); + color: #09a2ff; background: none !important; } @@ -742,25 +743,25 @@ formWizard.css ========================================================================== */ legend.legendStep { - color: rgb(255, 255, 255); - margin-bottom: 0; + color: #ffffff; + margin-bottom: 0px; } div.stepsguide .step p { margin: 12px 0; - border-bottom: 4px solid rgb(35, 175, 220); + border-bottom: 4px solid #23AFDC; } div.stepsguide .disabledstep p { - border-bottom: 4px solid rgb(17, 120, 179); + border-bottom: 4px solid #1178B3; } div.formpaginate .prev, div.formpaginate .next { padding: 3px 6px; - color: rgb(255, 255, 255); + color: #fff; cursor: hand; cursor: pointer; - background: rgb(34, 101, 161); + background: #2265A1; -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; @@ -771,25 +772,25 @@ pnotify.css ========================================================================== */ .ui-pnotify-container { - border: 1px solid rgb(17, 17, 17); - background-image: -moz-linear-gradient(rgb(51, 51, 51), rgb(61, 61, 61)) !important; - background-image: linear-gradient(rgb(51, 51, 51), rgb(61, 61, 61)) !important; - background-image: -webkit-linear-gradient(rgb(51, 51, 51), rgb(61, 61, 61)) !important; - background-image: -o-linear-gradient(rgb(51, 51, 51), rgb(61, 61, 61)) !important; - filter: progid:dximagetransform.microsoft.gradient(startColorstr=rgb(51, 51, 51), endColorstr=rgb(61, 61, 61)) !important; - -ms-filter: progid:dximagetransform.microsoft.gradient(startColorstr=rgb(51, 51, 51), endColorstr=rgb(61, 61, 61)) !important; - -moz-box-shadow: 0 0 2px rgb(0, 0, 0); - -webkit-box-shadow: 0 0 2px rgb(0, 0, 0); - -o-box-shadow: 0 0 2px rgb(0, 0, 0); - box-shadow: 0 0 2px rgb(0, 0, 0); + border: 1px solid #111; + background-image: -moz-linear-gradient(#333, #3d3d3d) !important; + background-image: linear-gradient(#333, #3d3d3d) !important; + background-image: -webkit-linear-gradient(#333, #3d3d3d) !important; + background-image: -o-linear-gradient(#333, #3d3d3d) !important; + filter: progid:dximagetransform.microsoft.gradient(startColorstr=#333, endColorstr=#3d3d3d) 
!important; + -ms-filter: progid:dximagetransform.microsoft.gradient(startColorstr=#333, endColorstr=#3d3d3d) !important; + -moz-box-shadow: 0px 0px 2px #000; + -webkit-box-shadow: 0px 0px 2px #000; + -o-box-shadow: 0px 0px 2px #000; + box-shadow: 0px 0px 2px #000; } .ui-pnotify-title { - color: rgb(255, 255, 255); + color: #ffffff; } .ui-pnotify-text { - color: rgb(255, 255, 255); + color: #ffffff; } /* ======================================================================= @@ -800,60 +801,60 @@ tablesorter.css width: 100%; margin-right: auto; margin-left: auto; - color: rgb(255, 255, 255); + color: #fff; text-align: left; - background-color: rgb(51, 51, 51); + background-color: #333; border-spacing: 0; } .tablesorter th, .tablesorter td { padding: 4px; - border-top: rgb(34, 34, 34) 1px solid; - border-left: rgb(34, 34, 34) 1px solid; + border-top: #222 1px solid; + border-left: #222 1px solid; vertical-align: middle; } .tablesorter th { - color: rgb(255, 255, 255); + color: #fff; text-align: center; text-shadow: -1px -1px 0 rgba(0,0,0,0.3); - background-color: rgb(21, 82, 143); + background-color: #15528F; border-collapse: collapse; font-weight: normal; } .tablesorter thead .tablesorter-headerDesc { - background-color: rgb(41, 122, 184); + background-color: #297AB8; background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAAP///////yH5BAEAAAEALAAAAAAVAAQAAAINjB+gC+jP2ptn0WskLQA7); /* background-image: url(../images/tablesorter/asc.gif); */ } .tablesorter thead .tablesorter-headerAsc { - background-color: rgb(41, 122, 184); + background-color: #297AB8; background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAAP///////yH5BAEAAAEALAAAAAAVAAQAAAINjI8Bya2wnINUMopZAQA7); /* background-image: url(../images/tablesorter/desc.gif); */ } thead.tablesorter-stickyHeader { - border-top: 2px solid rgb(34, 34, 34); - border-bottom: 2px solid rgb(34, 34, 34); + border-top: 2px solid #222; + border-bottom: 2px solid #222; } /* Zebra Widget - row alternating colors */ .tablesorter tr.odd, .sickbeardTable tr.odd { - background-color: rgb(51, 51, 51); + background-color: #333333; } .tablesorter tr.even, .sickbeardTable tr.even { - background-color: rgb(46, 46, 46); + background-color: #2e2e2e; } .tablesorter tr.tablesorter-filter-row, .tablesorter tr.tablesorter-filter-row td { text-align: center; - background: rgb(51, 51, 51); - border-bottom: 1px solid rgb(17, 17, 17); + background: #333; + border-bottom: 1px solid #111; } /* hidden filter row */ @@ -880,7 +881,7 @@ thead.tablesorter-stickyHeader { } #showListTable tbody { - color: rgb(0, 0, 0); + color: #000; } /* ======================================================================= @@ -888,11 +889,11 @@ token-input.css ========================================================================== */ div.token-input-dropdown { - background-color: rgb(255, 255, 255); - color: rgb(0, 0, 0); - border-left-color: rgb(204, 204, 204); - border-right-color: rgb(204, 204, 204); - border-bottom-color: rgb(204, 204, 204); + background-color: #fff; + color: #000; + border-left-color: #ccc; + border-right-color: #ccc; + border-bottom-color: #ccc; } /* ======================================================================= @@ -900,29 +901,29 @@ jquery.confirm.css ========================================================================== */ #confirmBox{ - background: rgb(34, 34, 34); + background: #222; width: 460px; position: fixed; left: 50%; top: 50%; margin: -130px 0 0 -230px; - border: 1px solid rgb(17, 17, 17); - box-shadow: 0 0 12px 0 rgba(0, 0, 0, 0.175); + 
border: 1px solid #111; + box-shadow: 0px 0px 12px 0px rgba(0, 0, 0, 0.175); } #confirmBox h1 { - background-color: rgb(21, 82, 143); - border-bottom: 1px solid rgb(17, 17, 17); - color: rgb(255, 255, 255); + background-color: #15528F; + border-bottom: 1px solid #111; + color: #fff; margin: 0; font-size: 22px; - text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); + text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); } #confirmBox p { padding-top: 20px; - color: rgb(255, 255, 255); - text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); + color: #fff; + text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); } /* ======================================================================= @@ -930,17 +931,17 @@ bootstrap modal ========================================================================== */ .modal-content { - background-color: rgb(61, 61, 61); + background-color: #3D3D3D; } .modal-body { - background-color: rgb(61, 61, 61); + background-color: #3D3D3D; } .modal-header { padding:9px 15px; - border-bottom:1px solid rgb(238, 238, 238); - background-color: rgb(21, 82, 143); + border-bottom:1px solid #eee; + background-color: #15528F; -webkit-border-top-left-radius: 5px; -webkit-border-top-right-radius: 5px; -moz-border-radius-topleft: 5px; @@ -954,13 +955,13 @@ bootstrap panel ========================================================================== */ .panel-default { - background-color: rgb(61, 61, 61); - border-color: rgb(17, 17, 17); + background-color: #3D3D3D; + border-color: #111111; } .panel-heading { - background-color: rgb(61, 61, 61) !important; - color: rgb(255, 255, 255) !important; + background-color: #3D3D3D !important; + color: #FFFFFF !important; } @@ -974,22 +975,22 @@ new #confirmBox left: 50%; top: 50%; margin: -130px 0 0 -230px; - border: 1px solid rgb(17, 17, 17); - box-shadow: 0 0 12px 0 rgba(0, 0, 0, 0.175); - border-radius: 0; + border: 1px solid #111; + box-shadow: 0px 0px 12px 0px rgba(0, 0, 0, 0.175); + border-radius: 0px; } .modal-content { - border-radius: 0; + border-radius: 0px; } .modal-header { border-bottom: none; - border-radius: 0; + border-radius: 0px; } .modal-body, .modal-content{ - background: rgb(34, 34, 34); + background: #222; } .modal-footer { @@ -1006,7 +1007,7 @@ new #confirmBox } .modal-header .close { - display: none; + display: none } .modal-footer button { @@ -1014,11 +1015,11 @@ new #confirmBox padding: 2px 15px; text-decoration: none; display: inline-block; - color: rgb(255, 255, 255); + color: #fff; text-align:center; - text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); + text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); background-clip: padding-box; - border: 1px solid rgb(17, 17, 17); + border: 1px solid #111; border-radius: 3px; cursor: pointer; -webkit-box-sizing: border-box; @@ -1037,17 +1038,17 @@ new #confirmBox } .modal-footer button.confirm { - background-color: rgb(63, 118, 54); + background-color: #3F7636; } .modal-footer button.confirm:hover { - background-color: rgb(72, 135, 62); + background-color: #48873E; } .modal-footer button.cancel { - background-color: rgb(141, 45, 43); + background-color: #8D2D2B; } .modal-footer button.cancel:hover { - background-color: rgb(161, 51, 49); + background-color: #A13331; } diff --git a/gui/slick/css/light.css b/gui/slick/css/light.css index 0d9c7a7563..1fbaed69c0 100644 --- a/gui/slick/css/light.css +++ b/gui/slick/css/light.css @@ -3,50 +3,50 @@ home.mako ========================================================================== */ .progress-100 { - background-image: -moz-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) 
!important; - background-image: linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; - background-image: -webkit-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; - background-image: -o-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; + background-image: -moz-linear-gradient(#a6cf41, #5b990d) !important; + background-image: linear-gradient(#a6cf41, #5b990d) !important; + background-image: -webkit-linear-gradient(#a6cf41, #5b990d) !important; + background-image: -o-linear-gradient(#a6cf41, #5b990d) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-80 { - background-image: -moz-linear-gradient(rgb(225, 255, 151), rgb(157, 178, 105)) !important; - background-image: linear-gradient(rgb(225, 255, 151), rgb(157, 178, 105)) !important; - background-image: -webkit-linear-gradient(rgb(225, 255, 151), rgb(157, 178, 105)) !important; - background-image: -o-linear-gradient(rgb(225, 255, 151), rgb(157, 178, 105)) !important; + background-image: -moz-linear-gradient(#e1ff97, #9db269) !important; + background-image: linear-gradient(#e1ff97, #9db269) !important; + background-image: -webkit-linear-gradient(#e1ff97, #9db269) !important; + background-image: -o-linear-gradient(#e1ff97, #9db269) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-60 { - background-image: -moz-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; - background-image: linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; - background-image: -webkit-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; - background-image: -o-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; + background-image: -moz-linear-gradient(#fad440, #f2a70d) !important; + background-image: linear-gradient(#fad440, #f2a70d) !important; + background-image: -webkit-linear-gradient(#fad440, #f2a70d) !important; + background-image: -o-linear-gradient(#fad440, #f2a70d) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-40 { - background-image: -moz-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; - background-image: linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; - background-image: -webkit-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; - background-image: -o-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; + background-image: -moz-linear-gradient(#fab543, #f2700d) !important; + background-image: linear-gradient(#fab543, #f2700d) !important; + background-image: -webkit-linear-gradient(#fab543, #f2700d) !important; + background-image: -o-linear-gradient(#fab543, #f2700d) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-20 { - background-image: -moz-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; - background-image: linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; - background-image: -webkit-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; - background-image: -o-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; + background-image: -moz-linear-gradient(#da5945, #b11a10) !important; + background-image: linear-gradient(#da5945, #b11a10) !important; + background-image: -webkit-linear-gradient(#da5945, #b11a10) !important; + background-image: -o-linear-gradient(#da5945, #b11a10) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; @@ 
-58,8 +58,8 @@ displayShow.mako .displayShowTable th.row-seasonheader { border: none !important; - background-color: rgb(255, 255, 255) !important; - color: rgb(0, 0, 0) !important; + background-color: #fff !important; + color: #000 !important; padding-top: 15px !important; text-align: left !important; } @@ -69,7 +69,7 @@ testRename.mako ========================================================================== */ tr.seasonheader { - color: rgb(0, 0, 0) !important; + color: #000 !important; } /* ======================================================================= @@ -77,19 +77,19 @@ schedule.mako ========================================================================== */ table.cal-odd { - background-color: rgb(221, 221, 221); + background-color: #ddd; } table.cal-even { - background-color: rgb(210, 210, 210); + background-color: #d2d2d2; } .calendarShow .text .airtime { - color: rgb(0, 0, 0); + color:#000 } .calendarShow .text .episode-title { - color: rgb(136, 136, 136); + color:#888 } /* ======================================================================= @@ -98,33 +98,33 @@ bootstrap Overrides /* navbar styling */ .navbar-default { - background-color: rgb(51, 51, 51); + background-color: #333333; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#555555', endColorstr='#333333'); - background: -webkit-gradient(linear, left top, left bottom, from(rgb(85, 85, 85)), to(rgb(51, 51, 51))); - background: -moz-linear-gradient(top, rgb(85, 85, 85), rgb(51, 51, 51)); - border-color: rgb(62, 63, 58); + background: -webkit-gradient(linear, left top, left bottom, from(#555), to(#333)); + background: -moz-linear-gradient(top, #555, #333); + border-color: #3e3f3a; } .navbar-default .navbar-collapse, .navbar-default .navbar-form { - border-color: rgb(62, 63, 58); + border-color: #3e3f3a; } .navbar-default .navbar-nav > li > a:hover, .navbar-default .navbar-nav > li > a:focus { - background-color: rgb(51, 51, 51); + background-color: #333; } .dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus { - color: rgb(255, 255, 255); + color: #fff; text-decoration: none; - background-color: rgb(51, 51, 51); + background-color: #333; } pre { - color: rgb(0, 0, 0); - background-color: rgb(245, 245, 245); - border-color: rgb(204, 204, 204); + color: #000; + background-color: #F5F5F5; + border-color: #ccc; } /* ======================================================================= @@ -135,9 +135,9 @@ tablesorter.css width: 100%; margin-right: auto; margin-left: auto; - color: rgb(0, 0, 0); + color: #000; text-align: left; - background-color: rgb(221, 221, 221); + background-color: #ddd; border-spacing: 0; } @@ -169,11 +169,11 @@ token-input.css ========================================================================== */ div.token-input-dropdown { - background-color: rgb(255, 255, 255); - color: rgb(0, 0, 0); - border-left-color: rgb(204, 204, 204); - border-right-color: rgb(204, 204, 204); - border-bottom-color: rgb(204, 204, 204); + background-color: #fff; + color: #000; + border-left-color: #ccc; + border-right-color: #ccc; + border-bottom-color: #ccc; } /* ======================================================================= @@ -182,8 +182,8 @@ bootstarp modal .modal-header { padding:9px 15px; - border-bottom:1px solid rgb(238, 238, 238); - background-color: rgb(245, 241, 228); + border-bottom:1px solid #eee; + background-color: #F5F1E4; -webkit-border-top-left-radius: 5px; -webkit-border-top-right-radius: 5px; -moz-border-radius-topleft: 5px; diff --git 
a/gui/slick/css/style.css b/gui/slick/css/style.css index fe9a25794d..be4e2d6002 100644 --- a/gui/slick/css/style.css +++ b/gui/slick/css/style.css @@ -1,6 +1,3 @@ -strong.warning { - color: red; -} /* ======================================================================= inc_top.mako ========================================================================== */ @@ -20,7 +17,7 @@ inc_top.mako .ui-autocomplete-loading { - background: rgb(255, 255, 255) url("../images/loading16.gif") right center no-repeat; + background: white url("../images/loading16.gif") right center no-repeat; } .browserDialog.busy .ui-dialog-buttonpane { @@ -33,7 +30,7 @@ inc_top.mako .ui-dialog, .ui-dialog-buttonpane { - background: rgb(236, 234, 223) url("../css/lib/images/ui-bg_fine-grain_10_eceadf_60x60.png") 50% 50% repeat !important; + background: #eceadf url("../css/lib/images/ui-bg_fine-grain_10_eceadf_60x60.png") 50% 50% repeat !important; } /* restore 1.8.x resize handle on dialog button pane */ @@ -47,7 +44,7 @@ inc_top.mako .ui-accordion-content, .ui-tabs-panel { - background: rgb(237, 237, 237) !important; + background: #ededed !important; background-image: none !important; } @@ -57,18 +54,18 @@ inc_top.mako } .ui-widget-content { - background: rgb(220, 220, 220) url("../css/lib/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png") 50% top repeat-x; + background: #dcdcdc url("../css/lib/images/ui-bg_highlight-soft_75_dcdcdc_1x100.png") 50% top repeat-x; } .ui-widget-header { - background: rgb(255, 255, 255) url("../css/lib/images/ui-bg_flat_0_ffffff_40x100.png") 50% 50% repeat-x; + background: #ffffff url("../css/lib/images/ui-bg_flat_0_ffffff_40x100.png") 50% 50% repeat-x; } .ui-state-default, .ui-widget-content .ui-state-default, .ui-widget-header .ui-state-default { - background: rgb(255, 255, 255); - border: 1px solid rgb(204, 204, 204); + background: #ffffff; + border: 1px solid #CCCCCC; } .ui-state-hover, @@ -77,25 +74,25 @@ inc_top.mako .ui-state-focus, .ui-widget-content .ui-state-focus, .ui-widget-header .ui-state-focus { - background: rgb(255, 255, 255); + background: #ffffff; } .ui-state-active, .ui-widget-content .ui-state-active, .ui-widget-header .ui-state-active { - background: rgb(247, 247, 247); + background: #F7F7F7; } .ui-state-highlight, .ui-widget-content .ui-state-highlight, .ui-widget-header .ui-state-highlight { - background: rgb(251, 249, 238) url("../css/lib/images/ui-bg_glass_55_fbf9ee_1x400.png") 50% 50% repeat-x; + background: #fbf9ee url("../css/lib/images/ui-bg_glass_55_fbf9ee_1x400.png") 50% 50% repeat-x; } .ui-state-error, .ui-widget-content .ui-state-error, .ui-widget-header .ui-state-error { - background: rgb(254, 241, 236) url("../css/lib/images/ui-bg_glass_95_fef1ec_1x400.png") 50% 50% repeat-x; + background: #fef1ec url("../css/lib/images/ui-bg_glass_95_fef1ec_1x400.png") 50% 50% repeat-x; } .ui-icon, @@ -130,41 +127,41 @@ inc_top.mako } .ui-widget-overlay { - background: rgb(170, 170, 170) url("../css/lib/images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; + background: #aaaaaa url("../css/lib/images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; } .ui-widget-shadow { - background: rgb(0, 0, 0) url("../css/lib/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat-x; + background: #000000 url("../css/lib/images/ui-bg_flat_0_000000_40x100.png") 50% 50% repeat-x; } .ui-state-active a, .ui-state-active a:link, .ui-state-active a:visited { - color: rgb(20, 15, 6); + color: #140F06; text-decoration: none; } .ui-state-default a, .ui-state-default a:link, 
.ui-state-default a:visited { - color: rgb(34, 34, 34); + color: #222; text-decoration: none; } .ui-tabs { - padding: 0; + padding: 0px; background: none; - border-width: 0; + border-width: 0px; } .ui-tabs .ui-tabs-nav { - padding-left: 0; + padding-left: 0px; background: transparent; - border-width: 0 0 0 0; - -moz-border-radius: 0; - -webkit-border-radius: 0; - border-radius: 0; + border-width: 0px 0px 0px 0px; + -moz-border-radius: 0px; + -webkit-border-radius: 0px; + border-radius: 0px; } .ui-tabs .ui-tabs-panel { - background-color: rgb(247, 247, 247) !important; - border: 1px solid rgb(204, 204, 204) !important; + background-color: #F7F7F7 !important; + border: 1px solid #CCCCCC !important; padding: 1em; } @@ -200,7 +197,7 @@ inc_top.mako margin-left: auto; margin-right: auto; margin-top: 50px; - margin-bottom: 0; + margin-bottom: 0px; } [class^="menu-icon-"], [class*=" menu-icon-"] { @@ -214,155 +211,155 @@ inc_top.mako } .menu-icon-addshow { - background-position: 0 0; + background-position: 0px 0px; } .menu-icon-anime { - background-position: -21px 0; + background-position: -21px 0px; } .menu-icon-backlog-view { - background-position: -42px 0; + background-position: -42px 0px; } .menu-icon-backlog { - background-position: -63px 0; + background-position: -63px 0px; } .menu-icon-bittorrent { - background-position: -84px 0; + background-position: -84px 0px; } .menu-icon-config-index { - background-position: -105px 0; + background-position: -105px 0px; } .menu-icon-config { - background-position: -126px 0; + background-position: -126px 0px; } .menu-icon-failed-download { - background-position: -147px 0; + background-position: -147px 0px; } .menu-icon-home { - background-position: -168px 0; + background-position: -168px 0px; } .menu-icon-manage { - background-position: -189px 0; + background-position: -189px 0px; } .menu-icon-manage-searches { - background-position: -210px 0; + background-position: -210px 0px; } .menu-icon-poster { - background-position: -231px 0; + background-position: -231px 0px; } .menu-icon-postprocess { - background-position: -252px 0; + background-position: -252px 0px; } .menu-icon-restart { - background-position: -273px 0; + background-position: -273px 0px; } .menu-icon-shutdown { - background-position: -294px 0; + background-position: -294px 0px; } .menu-icon-update { - background-position: -315px 0; + background-position: -315px 0px; } .menu-icon-viewlog-errors { - background-position: -336px 0; + background-position: -336px 0px; } .menu-icon-viewlog { - background-position: -357px 0; + background-position: -357px 0px; } .menu-icon-kodi { - background-position: -378px 0; + background-position: -378px 0px; } .menu-icon-help { - background-position: -399px 0; + background-position: -399px 0px; } .menu-icon-info { - background-position: -418px 0; + background-position: -418px 0px; } .menu-icon-error { - background-position: -436px 0; + background-position: -436px 0px; } .menu-icon-news { - background-position: -456px 0; + background-position: -456px 0px; } .menu-icon-irc { - background-position: -478px 0; + background-position: -478px 0px; } .menu-icon-changelog { - background-position: -495px 0; + background-position: -495px 0px; } .menu-icon-support { - background-position: -516px 0; + background-position: -516px 0px; } .menu-icon-plex { - background-position: -536px 0; + background-position: -536px 0px; } .menu-icon-backup { - background-position: -556px 0; + background-position: -556px 0px; } .menu-icon-provider { - background-position: -576px 0; + 
background-position: -576px 0px; } .menu-icon-notification { - background-position: -597px 0; + background-position: -597px 0px; } .menu-icon-emby { - background-position: -614px 0; + background-position: -614px 0px; } .menu-icon-blackhole { - background-position: -632px 0; + background-position: -632px 0px; } .menu-icon-schedule { - background-position: -653px 0; + background-position: -653px 0px; } .menu-icon-manage2 { - background-position: -673px 0; + background-position: -673px 0px; } .menu-icon-history { - background-position: -695px 0; + background-position: -695px 0px; } .menu-icon-trash { - background-position: -711px 0; + background-position: -711px 0px; } .menu-icon-cut { - background-position: -727px 0; + background-position: -727px 0px; } .menu-icon-select { - background-position: -742px 0; + background-position: -742px 0px; } .enable-daily-search-icon { @@ -397,13 +394,13 @@ inc_bottom.mako .footer { width: 100%; padding: 20px 0; - color: rgb(78, 78, 78); + color: #4e4e4e; text-align: center; font-size: 12px; } .footerhighlight { - color: rgb(17, 17, 17); + color: #111; display: inline; } @@ -413,13 +410,13 @@ inc_rootDirs.mako .rootdir-selectbox, .rootdir-selectbox #rootDirs, .rootdir-controls { - width: 430px; + width: 430px } .rootdir-selectbox { - padding: 0 0 5px; + padding: 0 0 5px } .rootdir-controls { - text-align: center; + text-align: center } /* ======================================================================= @@ -427,9 +424,10 @@ home.mako ========================================================================== */ .imgbanner .banner { - border: 1px solid rgb(204, 204, 204); + border: 1px solid #ccc; overflow: hidden; height: 66px; + overflow: hidden; border-radius: 8px; vertical-align: top; width: 360px; @@ -444,7 +442,7 @@ home.mako border-radius: 3px; vertical-align: middle; width: 45px; - border: 1px solid rgb(204, 204, 204); + border: 1px solid #ccc; margin-right: 5px; } @@ -468,57 +466,57 @@ home.mako height: 100%; overflow: visible; text-align: center; - text-shadow: 0 0 0.1em rgb(255, 255, 255); + text-shadow: 0 0 0.1em #fff; vertical-align: middle; font-size: 12px; - color: rgb(0, 0, 0); + color: #000000; } .progress-100 { - background-image: -moz-linear-gradient(rgb(57, 95, 7), rgb(42, 71, 5)) !important; - background-image: linear-gradient(rgb(57, 95, 7), rgb(42, 71, 5)) !important; - background-image: -webkit-linear-gradient(rgb(57, 95, 7), rgb(42, 71, 5)) !important; - background-image: -o-linear-gradient(rgb(57, 95, 7), rgb(42, 71, 5)) !important; + background-image: -moz-linear-gradient(#395f07, #2a4705) !important; + background-image: linear-gradient(#395f07, #2a4705) !important; + background-image: -webkit-linear-gradient(#395f07, #2a4705) !important; + background-image: -o-linear-gradient(#395f07, #2a4705) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-80 { - background-image: -moz-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; - background-image: linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; - background-image: -webkit-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; - background-image: -o-linear-gradient(rgb(166, 207, 65), rgb(91, 153, 13)) !important; + background-image: -moz-linear-gradient(#a6cf41, #5b990d) !important; + background-image: linear-gradient(#a6cf41, #5b990d) !important; + background-image: -webkit-linear-gradient(#a6cf41, #5b990d) !important; + background-image: -o-linear-gradient(#a6cf41, #5b990d) !important; 
-moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-60 { - background-image: -moz-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; - background-image: linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; - background-image: -webkit-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; - background-image: -o-linear-gradient(rgb(250, 212, 64), rgb(242, 167, 13)) !important; + background-image: -moz-linear-gradient(#fad440, #f2a70d) !important; + background-image: linear-gradient(#fad440, #f2a70d) !important; + background-image: -webkit-linear-gradient(#fad440, #f2a70d) !important; + background-image: -o-linear-gradient(#fad440, #f2a70d) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-40 { - background-image: -moz-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; - background-image: linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; - background-image: -webkit-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; - background-image: -o-linear-gradient(rgb(250, 181, 67), rgb(242, 112, 13)) !important; + background-image: -moz-linear-gradient(#fab543, #f2700d) !important; + background-image: linear-gradient(#fab543, #f2700d) !important; + background-image: -webkit-linear-gradient(#fab543, #f2700d) !important; + background-image: -o-linear-gradient(#fab543, #f2700d) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; } .progress-20 { - background-image: -moz-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; - background-image: linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; - background-image: -webkit-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; - background-image: -o-linear-gradient(rgb(218, 89, 69), rgb(177, 26, 16)) !important; + background-image: -moz-linear-gradient(#da5945, #b11a10) !important; + background-image: linear-gradient(#da5945, #b11a10) !important; + background-image: -webkit-linear-gradient(#da5945, #b11a10) !important; + background-image: -o-linear-gradient(#da5945, #b11a10) !important; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; @@ -547,10 +545,10 @@ span.show-option { .show-container { margin: 4px; - background-color: rgb(243, 243, 243); - border: 5px solid rgb(243, 243, 243); + background-color: #F3F3F3; + border: 5px solid #F3F3F3; overflow: hidden; - box-shadow: 1px 1px 3px 0 rgba(0, 0, 0, 0.31); + box-shadow: 1px 1px 3px 0px rgba(0, 0, 0, 0.31); } .show-details { @@ -569,29 +567,29 @@ span.show-option { } .show-container .ui-corner-all, .ui-corner-bottom, .ui-corner-right, .ui-corner-br { - border-bottom-right-radius: 0; + border-bottom-right-radius: 0px; } .show-container .ui-corner-all, .ui-corner-bottom, .ui-corner-left, .ui-corner-bl { - border-bottom-left-radius: 0; + border-bottom-left-radius: 0px; } .show-container .ui-corner-all, .ui-corner-top, .ui-corner-right, .ui-corner-tr { - border-top-right-radius: 0; + border-top-right-radius: 0px; } .show-container .ui-corner-all, .ui-corner-top, .ui-corner-left, .ui-corner-tl { - border-top-left-radius: 0; + border-top-left-radius: 0px; } .show-container .ui-widget-content { - border-top: 1px solid rgb(17, 17, 17); - border-bottom: 1px solid rgb(17, 17, 17); - border-left: 0; - border-right: 0; + border-top: 1px solid #111; + border-bottom: 1px solid #111; + border-left: 0px; + border-right: 0px; } .ui-progressbar .ui-progressbar-value { - 
height:20px; + height:20px } .ui-progressbar .progress-20 { border: none; @@ -600,8 +598,8 @@ span.show-option { .show-container .progress-40, .show-container .progress-60, .show-container .progress-80 { - border-radius: 0; - height: 7px; + border-radius: 0px; + height: 7px } .show-title { @@ -609,7 +607,7 @@ span.show-option { overflow: hidden; white-space: nowrap; font-size: 11px; - margin: 4px 4px 0 4px; + margin: 4px 4px 0px 4px; } .show-title:after { @@ -632,8 +630,8 @@ span.show-option { overflow: hidden; white-space: nowrap; font-size: 11px; - margin: 0 4px 4px 4px; - color: rgb(148, 148, 148); + margin: 0px 4px 4px 4px; + color: #949494; } .show-date:after { @@ -654,7 +652,7 @@ span.show-option { .show-table { text-align:center; vertical-align:middle; - width: 33%; + width: 33% } .show-add { @@ -706,13 +704,13 @@ span.show-option { } td.tvShow a { - color: rgb(0, 0, 0); + color: #000; text-decoration: none; } td.tvShow a:hover { cursor: pointer; - color: rgb(66, 139, 202); + color: #428BCA; } #popover-target label { @@ -775,23 +773,23 @@ div.buttontext p { } .add-list-icon-addnewshow { - background-position: 0 0; + background-position: 0px 0px; } .add-list-icon-addtrakt { - background-position: -37px 0; + background-position: -37px 0px; } .add-list-icon-addimdb { - background-position: -76px 0; + background-position: -76px 0px; } .add-list-icon-addexistingshow { - background-position: -113px 0; + background-position: -113px 0px; } .add-list-icon-addanime { - background-position: -150px 0; + background-position: -150px 0px; } /* ======================================================================= @@ -817,8 +815,8 @@ home_newShow.mako padding: 8px; overflow: hidden; font-size: 14px; - background-color: rgb(239, 239, 239); - border: 1px solid rgb(223, 222, 222); + background-color: #efefef; + border: 1px solid #dfdede; } #searchResults input[type="radio"] { @@ -844,7 +842,7 @@ ul#rootDirStaticList li { margin: 2px; list-style: none outside none; cursor: pointer; - background: url('../css/lib/images/ui-bg_highlight-soft_75_efefef_1x100.png') repeat-x scroll 50% 50% rgb(239, 239, 239); + background: url('../css/lib/images/ui-bg_highlight-soft_75_efefef_1x100.png') repeat-x scroll 50% 50% #EFEFEF; } ul#rootDirStaticList li label { @@ -887,8 +885,8 @@ home_trendingShows.mako .traktContainer { margin: 12px; width: 188px; - background-color: rgb(223, 218, 207); - border: 1px solid rgb(17, 17, 17); + background-color: #DFDACF; + border: 1px solid #111; border-radius: 6px; } @@ -897,7 +895,7 @@ home_trendingShows.mako width: 186px; border-top-left-radius: 5px; border-top-right-radius: 5px; - border-bottom: 1px solid rgb(17, 17, 17); + border-bottom: 1px solid #111; } /* ======================================================================= @@ -936,7 +934,7 @@ displayShow.mako #topcontrol { -webkit-filter: grayscale(100%); filter: grayscale(100%); - filter: rgb(128, 128, 128); + filter: gray; filter: url("data:image/svg+xml;utf8,#greyscale"); } @@ -946,7 +944,7 @@ h1.title { line-height: 30px; text-align: left; text-rendering: optimizelegibility; - border-bottom: 1px solid rgb(136, 136, 136); + border-bottom: 1px solid #888; } h1.title a { @@ -999,21 +997,21 @@ ul.tags li { margin-right: 4px; margin-bottom: 5px; padding: 3px 4px 3px 25px; - background: url(../images/tag.png) no-repeat scroll 5px 4px rgb(85, 85, 85); + background: url(../images/tag.png) no-repeat scroll 5px 4px #555; border-radius: 3px; - border: 1px solid rgb(17, 17, 17); - color: rgb(255, 255, 255); + border: 1px solid 
#111; + color: #FFF; font: 14px/18px "Open Sans", "Helvetica Neue", Helvetica, Arial, Geneva, sans-serif; - text-shadow: 0 1px rgba(0, 0, 0, 0.8); + text-shadow: 0px 1px rgba(0, 0, 0, 0.8); float: left; } ul.tags li a{ - color: rgb(255, 255, 255); + color: #FFF; } .tvshowImg { - border: 1px solid rgb(204, 204, 204); + border: 1px solid #ccc; border-radius: 5px; height: 311px; width: auto; @@ -1022,8 +1020,8 @@ ul.tags li a{ #summary { padding: 10px; - background-color: rgb(239, 239, 239); - border: 1px solid rgb(223, 222, 222); + background-color: #efefef; + border: 1px solid #dfdede; width: 100%; height: 250px; overflow: auto; @@ -1056,52 +1054,47 @@ ul.tags li a{ } .unaired { - background-color: rgb(245, 241, 228); + background-color: #f5f1e4; } .skipped { - background-color: rgb(190, 222, 237); + background-color: #bedeed; } .good { - background-color: rgb(195, 227, 200); + background-color: #c3e3c8; } .qual { - background-color: rgb(255, 218, 138); + background-color: #ffda8a; } .wanted { - background-color: rgb(255, 176, 176); + background-color: #ffb0b0; } .snatched { - background-color: rgb(235, 193, 234); -} -.failed { - text-decoration: line-through; - text-decoration-color: red; - background-color: grey; + background-color: #ebc1ea; } span.unaired { - color: rgb(88, 75, 32); - border: 1px solid rgb(88, 75, 32); + color: #584b20; + border: 1px solid #584b20; } span.skipped { - color: rgb(29, 80, 104); - border: 1px solid rgb(29, 80, 104); + color: #1d5068; + border: 1px solid #1d5068; } span.good { - color: rgb(41, 87, 48); - border: 1px solid rgb(41, 87, 48); + color: #295730; + border: 1px solid #295730; } span.qual { - color: rgb(118, 81, 0); - border: 1px solid rgb(118, 81, 0); + color: #765100; + border: 1px solid #765100; } span.wanted { - color: rgb(137, 0, 0); - border: 1px solid rgb(137, 0, 0); + color: #890000; + border: 1px solid #890000; } span.snatched { - color: rgb(101, 33, 100); - border: 1px solid rgb(101, 33, 100); + color: #652164; + border: 1px solid #652164; } span.unaired b, @@ -1110,7 +1103,7 @@ span.good b, span.qual b, span.wanted b, span.snatched b { - color: rgb(0, 0, 0); + color: #000000; font-weight: 800; } @@ -1137,17 +1130,17 @@ span.snatched b { text-align: center; border: none; empty-cells: show; - color: rgb(0, 0, 0) !important; + color: #000 !important; } .displayShowTable.display_show { - clear:both; + clear:both } .displayShowTable th.row-seasonheader { border: none !important; - background-color: rgb(34, 34, 34) !important; - color: rgb(255, 255, 255) !important; + background-color: #222 !important; + color: #fff !important; padding-top: 15px !important; text-align: left !important; } @@ -1167,27 +1160,27 @@ span.snatched b { } .sickbeardTable.display_show { - clear:both; + clear:both } .sickbeardTable th{ - color: rgb(255, 255, 255); + color: #fff; text-align: center; - background-color: rgb(51, 51, 51); + background-color: #333; white-space: nowrap; } .sickbeardTable th, .sickbeardTable td { - border-top: 1px solid rgb(255, 255, 255); - border-left: 1px solid rgb(255, 255, 255); + border-top: 1px solid #fff; + border-left: 1px solid #fff; padding: 4px; } th.row-seasonheader { border: none; - background-color: rgb(255, 255, 255); - color: rgb(0, 0, 0); + background-color: #fff; + color: #000; padding-top: 15px; text-align: left; } @@ -1197,7 +1190,7 @@ tr.seasonheader { padding-top: 10px; text-align: left; border: none; - color: rgb(255, 255, 255); + color: #fff; } th.col-checkbox, @@ -1327,39 +1320,39 @@ schedule.mako } .listing-default { - 
background-color: rgb(245, 241, 228); + background-color: #f5f1e4; } .listing-current { - background-color: rgb(221, 255, 221); + background-color: #dfd; } .listing-overdue { - background-color: rgb(255, 221, 221); + background-color: #fdd; } .listing-toofar { - background-color: rgb(190, 222, 237); + background-color: #bedeed; } span.listing-default { - color: rgb(130, 111, 48); - border: 1px solid rgb(130, 111, 48); + color: #826f30; + border: 1px solid #826f30; } span.listing-current { - color: rgb(41, 87, 48); - border: 1px solid rgb(41, 87, 48); + color: #295730; + border: 1px solid #295730; } span.listing-overdue { - color: rgb(137, 0, 0); - border: 1px solid rgb(137, 0, 0); + color: #890000; + border: 1px solid #890000; } span.listing-toofar { - color: rgb(29, 80, 104); - border: 1px solid rgb(29, 80, 104); + color: #1d5068; + border: 1px solid #1d5068; } h2.day, h2.network { @@ -1368,28 +1361,28 @@ h2.day, h2.network { line-height: 36px; font-weight: bold; letter-spacing: 1px; - color: rgb(255, 255, 255); + color: #FFF; text-align: center; - text-shadow: -1px -1px 0 rgba(0, 0, 0, 0.3); - background-color: rgb(51, 51, 51); + text-shadow: -1px -1px 0px rgba(0, 0, 0, 0.3); + background-color: #333; } .tvshowDiv { display: block; clear: both; - border: 1px solid rgb(204, 204, 204); + border: 1px solid #ccc; margin: auto; - padding: 0; + padding: 0px; text-align: left; width: 750px; border-radius: 5px; - background: rgb(255, 255, 255); + background: #fff; cursor: default; overflow: hidden; } .tvshowDiv a:hover { - color: rgb(66, 139, 202); + color: #428BCA; } .tvshowDiv a, .tvshowDiv a:link, .tvshowDiv a:visited, .tvshowDiv a:hover { @@ -1398,11 +1391,11 @@ h2.day, h2.network { } .tvshowTitle a { - color: rgb(0, 0, 0); + color: #000000; float: left; line-height: 1.4em; font-size: 1.4em; - text-shadow: -1px -1px 0 rgb(255, 255, 255); + text-shadow: -1px -1px 0 #FFF; } .tvshowTitleIcons { @@ -1417,28 +1410,28 @@ h2.day, h2.network { .tvshowDiv td.next_episode { width: 100%; height: 90%; - border-bottom: 1px solid rgb(204, 204, 204); + border-bottom: 1px solid #ccc; vertical-align: top; - color: rgb(0, 0, 0); + color: #000; } .bannerThumb { vertical-align: top; height: auto; width: 748px; - border-bottom: 1px solid rgb(204, 204, 204); + border-bottom: 1px solid #ccc; } .posterThumb { vertical-align: top; height: auto; width: 180px; - border-right: 1px solid rgb(204, 204, 204); + border-right: 1px solid #ccc; } .ep_listing { width: auto; - border: 1px solid rgb(204, 204, 204); + border: 1px solid #ccc; margin-bottom: 10px; padding: 10px; } @@ -1461,7 +1454,7 @@ h2.day, h2.network { .calendarWrapper { width:1000px; margin:0 auto; - padding:0 3px; + padding:0 3px } .calendarTable { @@ -1472,20 +1465,20 @@ h2.day, h2.network { } .calendarShow { - padding:0 !important; + padding:0 !important } .calendarShow .poster { - padding-bottom:2px; + padding-bottom:2px } .calendarShow .poster img { width:142px; - height:auto; + height:auto } .calendarShow .text { - padding:0 5px 10px 5px; + padding:0 5px 10px 5px } .calendarShow .text .airtime, @@ -1493,12 +1486,12 @@ h2.day, h2.network { overflow:hidden; text-overflow:ellipsis; display:block; - font-size:11px; + font-size:11px } .calendarShow .show-status { padding:5px 10px 10px; - text-align:center; + text-align:center } /* ======================================================================= @@ -1516,12 +1509,12 @@ config*.mako .component-group { padding: 15px 15px 25px; - border-bottom: 1px dotted rgb(204, 204, 204); + border-bottom: 1px dotted 
#ccc; min-height: 200px; } .component-item { - border-bottom: 1px dotted rgb(102, 102, 102); + border-bottom: 1px dotted #666; min-height: 200px; } @@ -1537,11 +1530,11 @@ config*.mako .component-group-desc p { width: 90%; margin: 10px 0; - color: rgb(102, 102, 102); + color: #666; } #config div.field-pair { - padding: 12px 0; + padding: 12px 0px; } #config div.field-pair select, @@ -1570,11 +1563,11 @@ config*.mako font-weight: normal; display:block; width:475px; - margin-left:182px; + margin-left:182px } #config label.space-right { - margin-right:10px; + margin-right:10px } #config .metadataDiv { @@ -1599,7 +1592,7 @@ select .selected { padding: 5px; margin-bottom: 10px; line-height: 20px; - border: 1px dotted rgb(204, 204, 204); + border: 1px dotted #CCC; } #providerOrderList { @@ -1623,16 +1616,16 @@ select .selected { } #provider_order_list .ui-state-default.torrent-provider { - background-color: rgb(255, 255, 255) !important; + background-color: #FFFFFF !important; } #provider_order_list .ui-state-default.nzb-provider { - background-color: rgb(221, 221, 221) !important; + background-color: #DDD !important; } #provider_order_list input, #service_order_list input { - margin: 0 2px; + margin: 0px 2px; } #config .tip_scale label span.component-title { @@ -1652,7 +1645,7 @@ select .selected { } .infoTableSeperator { - border-top: 1px dotted rgb(102, 102, 102); + border-top: 1px dotted #666666; } .infoTableHeader { @@ -1733,67 +1726,67 @@ select .selected { } .add-client-icon-sabnzbd { - background-position: 0 0; + background-position: 0px 0px; } .add-client-icon-nzbget { - background-position: -34px 0; + background-position: -34px 0px; } .add-client-icon-blackhole { - background-position: -71px 0; + background-position: -71px 0px; } .add-client-icon-deluge { - background-position: -106px 0; + background-position: -106px 0px; } .add-client-icon-deluged { - background-position: -106px 0; + background-position: -106px 0px; } .add-client-icon-qbittorrent { - background-position: -138px 0; + background-position: -138px 0px; } .add-client-icon-rtorrent { - background-position: -172px 0; + background-position: -172px 0px; } .add-client-icon-download-station { - background-position: -205px 0; + background-position: -205px 0px; } .add-client-icon-transmission { - background-position: -241px 0; + background-position: -241px 0px; } .add-client-icon-utorrent { - background-position: -273px 0; + background-position: -273px 0px; } .add-client-icon-spotnet { - background-position: -311px 0; + background-position: -311px 0px; } .add-client-icon-mlnet { - background-position: -344px 0; + background-position: -344px 0px; } .add-client-icon-rss { - background-position: -380px 0; + background-position: -380px 0px; } .add-client-icon-folder { - background-position: -416px 0; + background-position: -416px 0px; } .add-client-icon-ftp { - background-position: -452px 0; + background-position: -452px 0px; } .add-client-icon-irc { - background-position: -488px 0; + background-position: -488px 0px; } /* ======================================================================= @@ -1801,15 +1794,15 @@ config_postProcessing.mako ========================================================================== */ #config div.example { - padding: 10px; background-color: rgb(239, 239, 239); + padding: 10px; background-color: #efefef; } .Key { width: 100%; padding: 6px; font-size: 13px; - background-color: rgb(244, 244, 244); - border: 1px solid rgb(204, 204, 204); + background-color: #f4f4f4; + border: 1px solid #ccc; 
border-collapse: collapse; border-spacing: 0; } @@ -1817,9 +1810,9 @@ config_postProcessing.mako .Key th, .tableHeader { padding: 3px 9px; margin: 0; - color: rgb(255, 255, 255); + color: #fff; text-align: center; - background: none repeat scroll 0 0 rgb(102, 102, 102); + background: none repeat scroll 0 0 #666; } .Key td { @@ -1827,11 +1820,11 @@ config_postProcessing.mako } .Key tr { - border-bottom: 1px solid rgb(204, 204, 204); + border-bottom: 1px solid #ccc; } .Key tr.even { - background-color: rgb(223, 222, 222); + background-color: #dfdede; } .legend { @@ -1856,13 +1849,13 @@ div.metadata_example_wrapper { div.metadata_options { padding: 7px; overflow: auto; - background: rgb(245, 241, 228); - border: 1px solid rgb(204, 204, 204); + background: #f5f1e4; + border: 1px solid #ccc; } div.metadata_options label:hover { - color: rgb(255, 255, 255); - background-color: rgb(87, 68, 43); + color: #fff; + background-color: #57442b; cursor: pointer; } @@ -1870,7 +1863,7 @@ div.metadata_options label { display: block; padding-left: 7px; line-height: 20px; - color: rgb(0, 51, 102); + color: #036; } div.metadata_example { @@ -1880,22 +1873,22 @@ div.metadata_example { div.metadata_example label { display: block; line-height: 21px; - color: rgb(0, 0, 0); + color: #000; cursor: pointer; } div.metadataDiv .disabled { - color: rgb(204, 204, 204); + color: #ccc; } .notifier-icon { float: left; - margin: 6px 4px 0 0; + margin: 6px 4px 0px 0px; } .warning { - border-color: rgb(248, 148, 6); - background: url("../images/warning16.png") no-repeat right 5px center rgb(255, 255, 255); + border-color: #F89406; + background: url("../images/warning16.png") no-repeat right 5px center #fff; } [class^="icon-notifiers-"], [class*=" icon-notifiers-"] { @@ -1909,95 +1902,95 @@ div.metadataDiv .disabled { } .icon-notifiers-kodi { - background-position: 0 0; + background-position: 0px 0px; } .icon-notifiers-plex { - background-position: -35px 0; + background-position: -35px 0px; } .icon-notifiers-plexth { - background-position: -69px 0; + background-position: -69px 0px; } .icon-notifiers-emby { - background-position: -104px 0; + background-position: -104px 0px; } .icon-notifiers-nmj { - background-position: -136px 0; + background-position: -136px 0px; } .icon-notifiers-syno1 { - background-position: -168px 0; + background-position: -168px 0px; } .icon-notifiers-syno2 { - background-position: -202px 0; + background-position: -202px 0px; } .icon-notifiers-pytivo { - background-position: -237px 0; + background-position: -237px 0px; } .icon-notifiers-growl { - background-position: -272px 0; + background-position: -272px 0px; } .icon-notifiers-prowl { - background-position: -308px 0; + background-position: -308px 0px; } .icon-notifiers-libnotify { - background-position: -345px 0; + background-position: -345px 0px; } .icon-notifiers-pushover { - background-position: -377px 0; + background-position: -377px 0px; } .icon-notifiers-boxcar2 { - background-position: -414px 0; + background-position: -414px 0px; } .icon-notifiers-nma { - background-position: -450px 0; + background-position: -450px 0px; } .icon-notifiers-pushalot { - background-position: -486px 0; + background-position: -486px 0px; } .icon-notifiers-pushbullet { - background-position: -519px 0; + background-position: -519px 0px; } .icon-notifiers-freemobile { - background-position: -551px 0; + background-position: -551px 0px; } .icon-notifiers-telegram { - background-position: -587px 0; + background-position: -587px 0px; } .icon-notifiers-twitter { - 
background-position: -624px 0; + background-position: -624px 0px; } .icon-notifiers-trakt { - background-position: -659px 0; + background-position: -659px 0px; } .icon-notifiers-email { - background-position: -695px 0; + background-position: -695px 0px; } .icon-notifiers-anime { - background-position: -733px 0; + background-position: -733px 0px; } .icon-notifiers-look { - background-position: -769px 0; + background-position: -769px 0px; } /* ======================================================================= @@ -2029,7 +2022,7 @@ td.tableright { text-align: left; vertical-align: middle; width: 225px; - padding: 6px 0; + padding: 6px 0px; } .optionWrapper div.selectChoices { @@ -2050,11 +2043,11 @@ td.tableright { .separator { font-size: 90%; - color: rgb(51, 51, 51); + color: #333333; } a.whitelink { - color: rgb(255, 255, 255); + color: #fff; } /* ======================================================================= @@ -2063,8 +2056,8 @@ Global span.path { padding: 3px 6px; - color: rgb(139, 0, 0); - background-color: rgb(245, 241, 228); + color: #8b0000; + background-color: #f5f1e4; } .align-left { @@ -2084,8 +2077,8 @@ span.quality { background-image:linear-gradient(to bottom, rgba(255,255,255,0.08),rgba(255,255,255,0) 50%,rgba(0,0,0,0) 50%,rgba(0,0,0,0.25)); -webkit-box-shadow:inset 0 1px rgba(255,255,255,0.1),inset 0 -1px 3px rgba(0,0,0,0.3),inset 0 0 0 1px rgba(255,255,255,0.08),0 1px 2px rgba(0,0,0,0.15); box-shadow:inset 0 1px rgba(255,255,255,0.1),inset 0 -1px 3px rgba(0,0,0,0.3),inset 0 0 0 1px rgba(255,255,255,0.08),0 1px 2px rgba(0,0,0,0.15); - text-shadow: 0 1px rgba(0, 0, 0, 0.8); - color: rgb(255, 255, 255); + text-shadow: 0px 1px rgba(0, 0, 0, 0.8); + color: #FFFFFF; display: inline-block; padding: 2px 4px; text-align: center; @@ -2097,108 +2090,89 @@ span.quality { } span.any-hd { - background-color: rgb(38, 114, 182); + background-color: #2672b6; background: repeating-linear-gradient( -45deg, - rgb(38, 114, 182), - rgb(38, 114, 182) 10px, - rgb(91, 153, 13) 10px, - rgb(91, 153, 13) 20px + #2672b6, + #2672b6 10px, + #5b990d 10px, + #5b990d 20px ); } span.Custom { - background-color: rgb(98, 25, 147); + background-color: #621993; } span.HD { - background-color: rgb(38, 114, 182); + background-color: #2672B6; } span.HDTV { - background-color: rgb(38, 114, 182); + background-color: #2672B6; } span.HD720p { - background-color: rgb(91, 153, 13); + background-color: #5b990d; } span.HD1080p { - background-color: rgb(38, 114, 182); + background-color: #2672B6; } span.UHD-4K { - background-color: rgb(117, 0, 255); + background-color: #7500FF; } span.UHD-8K { - background-color: rgb(65, 0, 119); + background-color: #410077; } span.RawHD { - background-color: rgb(205, 115, 0); + background-color: #cd7300; } span.RawHDTV { - background-color: rgb(205, 115, 0); + background-color: #cd7300; } span.SD { - background-color: rgb(190, 38, 37); + background-color: #BE2625; } span.SDTV { - background-color: rgb(190, 38, 37); + background-color: #BE2625; } span.SDDVD { - background-color: rgb(190, 38, 37); + background-color: #BE2625; } span.Any { - background-color: rgb(102, 102, 102); + background-color: #666; } span.Unknown { - background-color: rgb(153, 153, 153); + background-color: #999; } span.Proper { - background-color: rgb(63, 127, 0); + background-color: #3F7F00; } span.false { - color: rgb(153, 51, 51); + color: #993333; /* red */ } span.true { - color: rgb(102, 153, 102); + color: #669966; /* green */ } span.break-word { word-break: break-all; } -span.required { - color: 
green; -} -span.preferred { - color: blue; -} -span.undesired { - color: orange; -} -span.ignored { - color: red; -} - -a.wiki { - color: red; -} -a.wiki strong{ - color: red; -} option.flag { padding-left: 35px; @@ -2238,7 +2212,7 @@ div.blackwhitelist{ } div.blackwhitelist input { - margin: 5px 0; + margin: 5px 0px; } div.blackwhitelist.pool select{ @@ -2255,7 +2229,7 @@ div.blackwhitelist span { } div.blackwhitelist.anidb, div.blackwhitelist.manual { - margin: 7px 0; + margin: 7px 0px; } /* ======================================================================= @@ -2266,7 +2240,7 @@ body { padding-top: 60px; overflow-y: scroll; font-family: "Open Sans", "Helvetica Neue", Helvetica, Arial, sans-serif; - color: rgb(0, 0, 0); + color: #000; } html * { @@ -2274,18 +2248,18 @@ html * { } input[type="checkbox"] { - margin: 2px 0 0; + margin: 2px 0px 0px; line-height: normal; } input[type="radio"] { - margin: 2px 0 0; + margin: 2px 0px 0px; line-height: normal; } input, textarea, select, .uneditable-input { width: auto; - color: rgb(0, 0, 0); + color: #000; } .container-fluid { @@ -2294,65 +2268,65 @@ input, textarea, select, .uneditable-input { } .navbar-brand { - padding: 0; + padding: 0px; } /* navbar styling */ .navbar-default .navbar-brand { - color: rgb(255, 255, 255); + color: #ffffff; } .navbar-default .navbar-brand:hover, .navbar-default .navbar-brand:focus { - color: rgb(255, 255, 255); + color: #ffffff; background-color: transparent; } .navbar-default .navbar-text { - color: rgb(221, 221, 221); + color: #dddddd; } .navbar-default .navbar-nav > li > a { - color: rgb(221, 221, 221); + color: #dddddd; } .navbar-default .navbar-nav > li > a:hover, .navbar-default .navbar-nav > li > a:focus { - color: rgb(255, 255, 255); + color: #ffffff; } .navbar-default .navbar-nav > .active > a, .navbar-default .navbar-nav > .active > a:hover, .navbar-default .navbar-nav > .active > a:focus { - color: rgb(255, 255, 255); - background-color: rgb(51, 51, 51); + color: #ffffff; + background-color: #333333; } .navbar-default .navbar-nav > .disabled > a, .navbar-default .navbar-nav > .disabled > a:hover, .navbar-default .navbar-nav > .disabled > a:focus { - color: rgb(204, 204, 204); + color: #cccccc; background-color: transparent; } .navbar-default .navbar-toggle { - border-color: rgb(204, 204, 204); + border-color: #cccccc; } .navbar-default .navbar-toggle:hover, .navbar-default .navbar-toggle:focus { - background-color: rgb(51, 51, 51); + background-color: #333333; } .navbar-default .navbar-toggle .icon-bar { - background-color: rgb(51, 51, 51); + background-color: #333333; } .navbar-default .navbar-nav > .open > a, .navbar-default .navbar-nav > .open > a:hover, .navbar-default .navbar-nav > .open > a:focus { - background-color: rgb(51, 51, 51); - color: rgb(255, 255, 255); + background-color: #333333; + color: #ffffff; } .navbar-default .navbar-nav > li.navbar-split > a { @@ -2370,55 +2344,55 @@ input, textarea, select, .uneditable-input { @media (max-width: 767px) { .navbar-default .navbar-nav .open .dropdown-menu > li > a { - color: rgb(221, 221, 221); + color: #dddddd; } .navbar-default .navbar-nav .open .dropdown-menu > li > a:hover, .navbar-default .navbar-nav .open .dropdown-menu > li > a:focus { - color: rgb(255, 255, 255); + color: #ffffff; background-color: transparent; } .navbar-default .navbar-nav .open .dropdown-menu > .active > a, .navbar-default .navbar-nav .open .dropdown-menu > .active > a:hover, .navbar-default .navbar-nav .open .dropdown-menu > .active > a:focus { - color: rgb(255, 
255, 255); - background-color: rgb(51, 51, 51); + color: #ffffff; + background-color: #333333; } .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a, .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a:hover, .navbar-default .navbar-nav .open .dropdown-menu > .disabled > a:focus { - color: rgb(204, 204, 204); + color: #cccccc; background-color: transparent; } } .navbar-default .navbar-link { - color: rgb(221, 221, 221); + color: #dddddd; } .navbar-default .navbar-link:hover { - color: rgb(255, 255, 255); + color: #ffffff; } .navbar-default .btn-link { - color: rgb(152, 151, 139); + color: #98978b; } .navbar-default .btn-link:hover, .navbar-default .btn-link:focus { - color: rgb(221, 221, 221); + color: #dddddd; } .navbar-default .btn-link[disabled]:hover, fieldset[disabled] .navbar-default .btn-link:hover, .navbar-default .btn-link[disabled]:focus, fieldset[disabled] .navbar-default .btn-link:focus { - color: rgb(204, 204, 204); + color: #cccccc; } .dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus { - color: rgb(38, 38, 38); + color: #262626; text-decoration: none; - background-color: rgb(245, 245, 245); + background-color: #F5F5F5; } .dropdown-menu > li > a { @@ -2426,13 +2400,13 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .dropdown-menu { - background-color: rgb(245, 241, 228); + background-color: #F5F1E4; border: 1px solid rgba(0, 0, 0, 0.15); - box-shadow: 0 6px 12px rgba(0, 0, 0, 0.176); + box-shadow: 0px 6px 12px rgba(0, 0, 0, 0.176); } .form-control { - color: rgb(0, 0, 0); + color: #000000; } .form-control-inline { @@ -2450,25 +2424,25 @@ fieldset[disabled] .navbar-default .btn-link:focus { font-size: 12px; line-height: 16px; *line-height: 20px; - color: rgb(51, 51, 51); + color: #333333; text-align: center; text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75); vertical-align: middle; cursor: pointer; - background-color: rgb(245, 245, 245); - *background-color: rgb(230, 230, 230); - background-image: -ms-linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(255, 255, 255)), to(rgb(230, 230, 230))); - background-image: -webkit-linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); - background-image: -o-linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); - background-image: linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); - background-image: -moz-linear-gradient(top, rgb(255, 255, 255), rgb(230, 230, 230)); + background-color: #f5f5f5; + *background-color: #e6e6e6; + background-image: -ms-linear-gradient(top, #ffffff, #e6e6e6); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), to(#e6e6e6)); + background-image: -webkit-linear-gradient(top, #ffffff, #e6e6e6); + background-image: -o-linear-gradient(top, #ffffff, #e6e6e6); + background-image: linear-gradient(top, #ffffff, #e6e6e6); + background-image: -moz-linear-gradient(top, #ffffff, #e6e6e6); background-repeat: repeat-x; - border: 1px solid rgb(204, 204, 204); + border: 1px solid #cccccc; *border: 0; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); - border-color: rgb(230, 230, 230) rgb(230, 230, 230) rgb(191, 191, 191); - border-bottom-color: rgb(179, 179, 179); + border-color: #e6e6e6 #e6e6e6 #bfbfbf; + border-bottom-color: #b3b3b3; -webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px; @@ -2485,13 +2459,13 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn.active, .btn.disabled, .btn[disabled] { - 
background-color: rgb(230, 230, 230); - *background-color: rgb(217, 217, 217); + background-color: #e6e6e6; + *background-color: #d9d9d9; } .btn:active, .btn.active { - background-color: rgb(204, 204, 204) \9; + background-color: #cccccc \9; } .btn:first-child { @@ -2499,10 +2473,10 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .btn:hover { - color: rgb(51, 51, 51); + color: #333333; text-decoration: none; - background-color: rgb(230, 230, 230); - *background-color: rgb(217, 217, 217); + background-color: #e6e6e6; + *background-color: #d9d9d9; background-position: 0 -15px; -webkit-transition: background-position 0.1s linear; -moz-transition: background-position 0.1s linear; @@ -2512,15 +2486,15 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .btn:focus { - outline: thin dotted rgb(51, 51, 51); + outline: thin dotted #333; outline: 5px auto -webkit-focus-ring-color; outline-offset: -2px; } .btn.active, .btn:active { - background-color: rgb(230, 230, 230); - background-color: rgb(217, 217, 217) \9; + background-color: #e6e6e6; + background-color: #d9d9d9 \9; background-image: none; outline: 0; -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05); @@ -2531,7 +2505,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn.disabled, .btn[disabled] { cursor: default; - background-color: rgb(230, 230, 230); + background-color: #e6e6e6; background-image: none; opacity: 0.65; filter: alpha(opacity=65); @@ -2581,7 +2555,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-info:hover, .btn-inverse, .btn-inverse:hover { - color: rgb(255, 255, 255); + color: #ffffff; text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); } @@ -2595,16 +2569,16 @@ fieldset[disabled] .navbar-default .btn-link:focus { } .btn-primary { - background-color: rgb(0, 116, 204); - *background-color: rgb(0, 85, 204); - background-image: -ms-linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(0, 136, 204)), to(rgb(0, 85, 204))); - background-image: -webkit-linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); - background-image: -o-linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); - background-image: -moz-linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); - background-image: linear-gradient(top, rgb(0, 136, 204), rgb(0, 85, 204)); + background-color: #0074cc; + *background-color: #0055cc; + background-image: -ms-linear-gradient(top, #0088cc, #0055cc); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0055cc)); + background-image: -webkit-linear-gradient(top, #0088cc, #0055cc); + background-image: -o-linear-gradient(top, #0088cc, #0055cc); + background-image: -moz-linear-gradient(top, #0088cc, #0055cc); + background-image: linear-gradient(top, #0088cc, #0055cc); background-repeat: repeat-x; - border-color: rgb(0, 85, 204) rgb(0, 85, 204) rgb(0, 53, 128); + border-color: #0055cc #0055cc #003580; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#0088cc', endColorstr='#0055cc', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2615,26 +2589,26 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-primary.active, .btn-primary.disabled, .btn-primary[disabled] { - background-color: rgb(0, 85, 204); - *background-color: rgb(0, 74, 179); + background-color: #0055cc; + *background-color: #004ab3; } .btn-primary:active, 
.btn-primary.active { - background-color: rgb(0, 64, 153) \9; + background-color: #004099 \9; } .btn-warning { - background-color: rgb(250, 167, 50); - *background-color: rgb(248, 148, 6); - background-image: -ms-linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(251, 180, 80)), to(rgb(248, 148, 6))); - background-image: -webkit-linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); - background-image: -o-linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); - background-image: -moz-linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); - background-image: linear-gradient(top, rgb(251, 180, 80), rgb(248, 148, 6)); + background-color: #faa732; + *background-color: #f89406; + background-image: -ms-linear-gradient(top, #fbb450, #f89406); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#fbb450), to(#f89406)); + background-image: -webkit-linear-gradient(top, #fbb450, #f89406); + background-image: -o-linear-gradient(top, #fbb450, #f89406); + background-image: -moz-linear-gradient(top, #fbb450, #f89406); + background-image: linear-gradient(top, #fbb450, #f89406); background-repeat: repeat-x; - border-color: rgb(248, 148, 6) rgb(248, 148, 6) rgb(173, 103, 4); + border-color: #f89406 #f89406 #ad6704; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#fbb450', endColorstr='#f89406', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2645,26 +2619,26 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-warning.active, .btn-warning.disabled, .btn-warning[disabled] { - background-color: rgb(248, 148, 6); - *background-color: rgb(223, 133, 5); + background-color: #f89406; + *background-color: #df8505; } .btn-warning:active, .btn-warning.active { - background-color: rgb(198, 118, 5) \9; + background-color: #c67605 \9; } .btn-danger { - background-color: rgb(218, 79, 73); - *background-color: rgb(189, 54, 47); - background-image: -ms-linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(238, 95, 91)), to(rgb(189, 54, 47))); - background-image: -webkit-linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); - background-image: -o-linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); - background-image: -moz-linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); - background-image: linear-gradient(top, rgb(238, 95, 91), rgb(189, 54, 47)); + background-color: #da4f49; + *background-color: #bd362f; + background-image: -ms-linear-gradient(top, #ee5f5b, #bd362f); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ee5f5b), to(#bd362f)); + background-image: -webkit-linear-gradient(top, #ee5f5b, #bd362f); + background-image: -o-linear-gradient(top, #ee5f5b, #bd362f); + background-image: -moz-linear-gradient(top, #ee5f5b, #bd362f); + background-image: linear-gradient(top, #ee5f5b, #bd362f); background-repeat: repeat-x; - border-color: rgb(189, 54, 47) rgb(189, 54, 47) rgb(128, 36, 32); + border-color: #bd362f #bd362f #802420; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#ee5f5b', endColorstr='#bd362f', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2675,26 +2649,26 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-danger.active, 
.btn-danger.disabled, .btn-danger[disabled] { - background-color: rgb(189, 54, 47); - *background-color: rgb(169, 48, 42); + background-color: #bd362f; + *background-color: #a9302a; } .btn-danger:active, .btn-danger.active { - background-color: rgb(148, 42, 37) \9; + background-color: #942a25 \9; } .btn-success { - background-color: rgb(91, 183, 91); - *background-color: rgb(81, 163, 81); - background-image: -ms-linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(98, 196, 98)), to(rgb(81, 163, 81))); - background-image: -webkit-linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); - background-image: -o-linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); - background-image: -moz-linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); - background-image: linear-gradient(top, rgb(98, 196, 98), rgb(81, 163, 81)); + background-color: #5bb75b; + *background-color: #51a351; + background-image: -ms-linear-gradient(top, #62c462, #51a351); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#62c462), to(#51a351)); + background-image: -webkit-linear-gradient(top, #62c462, #51a351); + background-image: -o-linear-gradient(top, #62c462, #51a351); + background-image: -moz-linear-gradient(top, #62c462, #51a351); + background-image: linear-gradient(top, #62c462, #51a351); background-repeat: repeat-x; - border-color: rgb(81, 163, 81) rgb(81, 163, 81) rgb(56, 112, 56); + border-color: #51a351 #51a351 #387038; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#62c462', endColorstr='#51a351', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2705,26 +2679,26 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-success.active, .btn-success.disabled, .btn-success[disabled] { - background-color: rgb(81, 163, 81); - *background-color: rgb(73, 146, 73); + background-color: #51a351; + *background-color: #499249; } .btn-success:active, .btn-success.active { - background-color: rgb(64, 129, 64) \9; + background-color: #408140 \9; } .btn-info { - background-color: rgb(73, 175, 205); - *background-color: rgb(47, 150, 180); - background-image: -ms-linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(91, 192, 222)), to(rgb(47, 150, 180))); - background-image: -webkit-linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); - background-image: -o-linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); - background-image: -moz-linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); - background-image: linear-gradient(top, rgb(91, 192, 222), rgb(47, 150, 180)); + background-color: #49afcd; + *background-color: #2f96b4; + background-image: -ms-linear-gradient(top, #5bc0de, #2f96b4); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#5bc0de), to(#2f96b4)); + background-image: -webkit-linear-gradient(top, #5bc0de, #2f96b4); + background-image: -o-linear-gradient(top, #5bc0de, #2f96b4); + background-image: -moz-linear-gradient(top, #5bc0de, #2f96b4); + background-image: linear-gradient(top, #5bc0de, #2f96b4); background-repeat: repeat-x; - border-color: rgb(47, 150, 180) rgb(47, 150, 180) rgb(31, 99, 119); + border-color: #2f96b4 #2f96b4 #1f6377; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#5bc0de', 
endColorstr='#2f96b4', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2735,26 +2709,26 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-info.active, .btn-info.disabled, .btn-info[disabled] { - background-color: rgb(47, 150, 180); - *background-color: rgb(42, 133, 160); + background-color: #2f96b4; + *background-color: #2a85a0; } .btn-info:active, .btn-info.active { - background-color: rgb(36, 116, 140) \9; + background-color: #24748c \9; } .btn-inverse { - background-color: rgb(65, 65, 65); - *background-color: rgb(34, 34, 34); - background-image: -ms-linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); - background-image: -webkit-gradient(linear, 0 0, 0 100%, from(rgb(85, 85, 85)), to(rgb(34, 34, 34))); - background-image: -webkit-linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); - background-image: -o-linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); - background-image: -moz-linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); - background-image: linear-gradient(top, rgb(85, 85, 85), rgb(34, 34, 34)); + background-color: #414141; + *background-color: #222222; + background-image: -ms-linear-gradient(top, #555555, #222222); + background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#555555), to(#222222)); + background-image: -webkit-linear-gradient(top, #555555, #222222); + background-image: -o-linear-gradient(top, #555555, #222222); + background-image: -moz-linear-gradient(top, #555555, #222222); + background-image: linear-gradient(top, #555555, #222222); background-repeat: repeat-x; - border-color: rgb(34, 34, 34) rgb(34, 34, 34) rgb(0, 0, 0); + border-color: #222222 #222222 #000000; border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); filter: progid:dximagetransform.microsoft.gradient(startColorstr='#555555', endColorstr='#222222', GradientType=0); filter: progid:dximagetransform.microsoft.gradient(enabled=false); @@ -2765,13 +2739,13 @@ fieldset[disabled] .navbar-default .btn-link:focus { .btn-inverse.active, .btn-inverse.disabled, .btn-inverse[disabled] { - background-color: rgb(34, 34, 34); - *background-color: rgb(21, 21, 21); + background-color: #222222; + *background-color: #151515; } .btn-inverse:active, .btn-inverse.active { - background-color: rgb(8, 8, 8) \9; + background-color: #080808 \9; } .btn-xs { @@ -2788,7 +2762,7 @@ fieldset[disabled] .navbar-default .btn-link:focus { right: 12px; display: inline-block; border-right: 6px solid transparent; - border-bottom: 6px solid rgb(245, 241, 228); + border-bottom: 6px solid #F5F1E4; border-left: 6px solid transparent; content: ""; } @@ -2869,8 +2843,8 @@ fieldset.sectionwrap { } legend.legendStep { - color: rgb(87, 68, 43); - margin-bottom: 0; + color: #57442b; + margin-bottom: 0px; } div.stepsguide { @@ -2888,15 +2862,15 @@ div.stepsguide .step { div.stepsguide .step p { margin: 12px 0; - border-bottom: 4px solid rgb(87, 68, 43); + border-bottom: 4px solid #57442b; } div.stepsguide .disabledstep { - color: rgb(196, 196, 196); + color: #c4c4c4; } div.stepsguide .disabledstep p { - border-bottom: 4px solid rgb(138, 119, 94); + border-bottom: 4px solid #8a775e; } div.stepsguide .step .smalltext { @@ -2914,10 +2888,10 @@ div.formpaginate { div.formpaginate .prev, div.formpaginate .next { padding: 3px 6px; - color: rgb(255, 255, 255); + color: #fff; cursor: hand; cursor: pointer; - background: rgb(87, 68, 43); + background: #57442b; -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; @@ -2975,7 +2949,7 @@ div.field-pair 
input { width: 85%; margin: .8em 0; font-size: 1.13em; - color: rgb(102, 102, 102); + color: #666; } /* ======================================================================= @@ -2986,17 +2960,17 @@ tablesorter.css width: 100%; margin-right: auto; margin-left: auto; - color: rgb(0, 0, 0); + color: #000; text-align: left; - background-color: rgb(255, 255, 255); + background-color: #fff; border-spacing: 0; } .tablesorter th, .tablesorter td { padding: 4px; - border-top: rgb(255, 255, 255) 1px solid; - border-left: rgb(255, 255, 255) 1px solid; + border-top: #fff 1px solid; + border-left: #fff 1px solid; vertical-align: middle; } @@ -3007,10 +2981,10 @@ tablesorter.css } .tablesorter th { - color: rgb(255, 255, 255); + color: #fff; text-align: center; text-shadow: -1px -1px 0 rgba(0,0,0,0.3); - background-color: rgb(51, 51, 51); + background-color: #333; border-collapse: collapse; font-weight: normal; } @@ -3025,13 +2999,13 @@ tablesorter.css } .tablesorter thead .tablesorter-headerDesc { - background-color: rgb(85, 85, 85); + background-color: #555; background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAAP///////yH5BAEAAAEALAAAAAAVAAQAAAINjB+gC+jP2ptn0WskLQA7); /* background-image: url(../images/tablesorter/asc.gif); */ } .tablesorter thead .tablesorter-headerAsc { - background-color: rgb(85, 85, 85); + background-color: #555; background-image: url(data:image/gif;base64,R0lGODlhFQAEAIAAAP///////yH5BAEAAAEALAAAAAAVAAQAAAINjI8Bya2wnINUMopZAQA7); /* background-image: url(../images/tablesorter/desc.gif); */ } @@ -3043,17 +3017,17 @@ tablesorter.css } thead.tablesorter-stickyHeader { - border-top: 2px solid rgb(255, 255, 255); - border-bottom: 2px solid rgb(255, 255, 255); + border-top: 2px solid #fff; + border-bottom: 2px solid #fff; } /* Zebra Widget - row alternating colors */ .tablesorter tr.odd, .sickbeardTable tr.odd { - background-color: rgb(245, 241, 228); + background-color: #f5f1e4; } .tablesorter tr.even, .sickbeardTable tr.even { - background-color: rgb(223, 218, 207); + background-color: #dfdacf; } /* filter widget */ @@ -3072,8 +3046,8 @@ thead.tablesorter-stickyHeader { .tablesorter tr.tablesorter-filter-row, .tablesorter tr.tablesorter-filter-row td { text-align: center; - background: rgb(238, 238, 238); - border-bottom: 1px solid rgb(221, 221, 221); + background: #eee; + border-bottom: 1px solid #ddd; } /* optional disabled input styling */ @@ -3087,15 +3061,15 @@ thead.tablesorter-stickyHeader { } .tablesorter tfoot tr { - color: rgb(255, 255, 255); + color: #fff; text-align: center; text-shadow: -1px -1px 0 rgba(0,0,0,0.3); - background-color: rgb(51, 51, 51); + background-color: #333; border-collapse: collapse; } .tablesorter tfoot a { - color: rgb(255, 255, 255); + color:#fff; text-decoration: none; } @@ -3108,14 +3082,14 @@ ul.token-input-list { height: auto !important; height: 1%; width: 273px; - border: 1px solid rgb(204, 204, 204); + border: 1px solid #ccc; cursor: text; font-size: 10px; - font-family: Verdana, sans-serif; + font-family: Verdana; z-index: 999; margin: 0; padding: 0 0 1px 0; - background-color: rgb(255, 255, 255); + background-color: #fff; list-style-type: none; /* clear: left; */ border-top-left-radius: 3px; @@ -3131,7 +3105,7 @@ ul.token-input-list li { ul.token-input-list li input { border: 0; padding: 3px 4px; - background-color: rgb(255, 255, 255); + background-color: white; /* -webkit-appearance: caret; */ } @@ -3141,8 +3115,8 @@ li.token-input-token { height: 1%; margin: 3px; padding: 3px 5px 0 5px; - background-color: rgb(208, 239, 160); - 
color: rgb(0, 0, 0); + background-color: #d0efa0; + color: #000; font-weight: bold; cursor: default; display: block; @@ -3168,17 +3142,17 @@ li.token-input-token p { li.token-input-token span { float: right; - color: rgb(119, 119, 119); + color: #777; cursor: pointer; } li.token-input-selected-token { - background-color: rgb(8, 132, 78); - color: rgb(255, 255, 255); + background-color: #08844e; + color: #fff; } li.token-input-selected-token span { - color: rgb(187, 187, 187); + color: #bbb; } li.token-input-input-token input { @@ -3194,7 +3168,7 @@ div.token-input-dropdown { border-bottom: 1px solid; cursor: default; font-size: 11px; - font-family: Verdana, sans-serif; + font-family: Verdana; z-index: 1; } @@ -3202,7 +3176,7 @@ div.token-input-dropdown p { margin: 0; padding: 3px; font-weight: bold; - color: rgb(119, 119, 119); + color: #777; } div.token-input-dropdown ul { @@ -3211,17 +3185,17 @@ div.token-input-dropdown ul { } div.token-input-dropdown ul li { - background-color: rgb(255, 255, 255); + background-color: #fff; padding: 3px; list-style-type: none; } div.token-input-dropdown ul li.token-input-dropdown-item { - background-color: rgb(250, 250, 250); + background-color: #fafafa; } div.token-input-dropdown ul li.token-input-dropdown-item2 { - background-color: rgb(255, 255, 255); + background-color: #fff; } div.token-input-dropdown ul li em { @@ -3230,24 +3204,21 @@ div.token-input-dropdown ul li em { } div.token-input-dropdown ul li.token-input-selected-dropdown-item { - background-color: rgb(97, 150, 194); + background-color: #6196c2; } span.token-input-delete-token { margin: 0 1px; } -.red-text {color: rgb(221, 51, 51); -} -.clear-left {clear:left; -} -.nextline-block {display:block; -} +.red-text {color:#d33} +.clear-left {clear:left} +.nextline-block {display:block} .trakt-image { display: block; z-index: 0; - background-image: url(/images/poster-dark.jpg); + background-image: url(/images/poster-dark.jpg) } /* ======================================================================= @@ -3267,14 +3238,14 @@ jquery.confirm.css } #confirmBox{ - background: rgb(245, 241, 228); + background: #F5F1E4; width: 460px; position: fixed; left: 50%; top: 50%; margin: -130px 0 0 -230px; - border: 1px solid rgb(17, 17, 17); - box-shadow: 0 0 12px 0 rgba(0, 0, 0, 0.175); + border: 1px solid #111; + box-shadow: 0px 0px 12px 0px rgba(0, 0, 0, 0.175); } #confirmBox h1, @@ -3283,18 +3254,18 @@ jquery.confirm.css } #confirmBox h1 { - background-color: rgb(51, 51, 51); - border-bottom: 1px solid rgb(17, 17, 17); - color: rgb(255, 255, 255); + background-color: #333; + border-bottom: 1px solid #111; + color: #fff; margin: 0; font-size: 22px; - text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); + text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); } #confirmBox p { padding-top: 20px; - color: rgb(0, 0, 0); - text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75); + color: #000; + text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.75); } #confirmButtons { @@ -3307,11 +3278,11 @@ jquery.confirm.css padding: 2px 20px; text-decoration: none; display: inline-block; - color: rgb(255, 255, 255); + color: #fff; text-align:center; - text-shadow: 0 1px 1px rgba(0, 0, 0, 0.75); + text-shadow: 0px 1px 1px rgba(0, 0, 0, 0.75); background-clip: padding-box; - border: 1px solid rgb(17, 17, 17); + border: 1px solid #111; border-radius: 3px; cursor: pointer; -webkit-box-sizing: border-box; @@ -3330,19 +3301,19 @@ jquery.confirm.css } #confirmBox .green { - background-color: rgb(63, 118, 54); + background-color: #3F7636; } #confirmBox 
.green:hover { - background-color: rgb(72, 135, 62); + background-color: #48873E; } #confirmBox .red { - background-color: rgb(141, 45, 43); + background-color: #8D2D2B; } #confirmBox .red:hover { - background-color: rgb(161, 51, 49); + background-color: #A13331; } /* ======================================================================= @@ -3369,7 +3340,7 @@ login.css .login h1 { padding: 0 0 10px; font-size: 60px; - font-family: Lobster, cursive; + font-family: Lobster; font-weight: normal; } @@ -3437,8 +3408,8 @@ IMDB Popular } .popularShow h3{ - padding:0; - margin:0; + padding:0px; + margin:0px; display:inline-block; margin-right:30px; } @@ -3467,11 +3438,11 @@ IMDB Popular .popularShow .rating{ font-size:90%; display:inline-block; - margin-left:0; + margin-left:0px; } .popularShow p{ - margin-bottom:0; + margin-bottom:0px; } table.home-header { @@ -3528,4 +3499,4 @@ viewLog.mako to configure the same for the columns which are used for the filter inputs.*/ .log-filter { margin-top: 20px; -} +} \ No newline at end of file diff --git a/gui/slick/js/addShowOptions.js b/gui/slick/js/addShowOptions.js index 5bc5066199..223bf051e5 100644 --- a/gui/slick/js/addShowOptions.js +++ b/gui/slick/js/addShowOptions.js @@ -2,10 +2,10 @@ $(document).ready(function () { $('#saveDefaultsButton').click(function () { var anyQualArray = []; var bestQualArray = []; - $('#allowed_qualities option:selected').each(function (i, d) { + $('#anyQualities option:selected').each(function (i, d) { anyQualArray.push($(d).val()); }); - $('#preferred_qualities option:selected').each(function (i, d) { + $('#bestQualities option:selected').each(function (i, d) { bestQualArray.push($(d).val()); }); @@ -28,7 +28,7 @@ $(document).ready(function () { }); }); - $('#statusSelect, #qualityPreset, #flatten_folders, #allowed_qualities, #preferred_qualities, #subtitles, #scene, #anime, #statusSelectAfter').change(function () { + $('#statusSelect, #qualityPreset, #flatten_folders, #anyQualities, #bestQualities, #subtitles, #scene, #anime, #statusSelectAfter').change(function () { $('#saveDefaultsButton').attr('disabled', false); }); diff --git a/gui/slick/js/qualityChooser.js b/gui/slick/js/qualityChooser.js index e4d25d68f8..81439f94f2 100644 --- a/gui/slick/js/qualityChooser.js +++ b/gui/slick/js/qualityChooser.js @@ -7,7 +7,7 @@ $(document).ready(function() { $('#customQuality').hide(); } - $('#allowed_qualities option').each(function() { + $('#anyQualities option').each(function() { var result = preset & $(this).val(); // jshint ignore:line if (result > 0) { $(this).attr('selected', 'selected'); @@ -16,7 +16,7 @@ $(document).ready(function() { } }); - $('#preferred_qualities option').each(function() { + $('#bestQualities option').each(function() { var result = preset & ($(this).val() << 16); // jshint ignore:line if (result > 0) { $(this).attr('selected', 'selected'); diff --git a/gui/slick/views/500.mako b/gui/slick/views/500.mako index 1d58216097..4a8547d4ac 100644 --- a/gui/slick/views/500.mako +++ b/gui/slick/views/500.mako @@ -1,15 +1,16 @@ <%inherit file="/layouts/main.mako"/> + <%block name="content">

    ${header}

    -A mako error has occurred.
    -If this happened during an update a simple page refresh may be the solution.
    -Mako errors that happen during updates may be a one time error if there were significant ui changes.
    +A mako error has occurred.
    +If this happened during an update a simple page refresh may be the solution.
    +Mako errors that happen during updates may be a one time error if there were significant ui changes.


    Show/Hide Error
    -
    +
     <% filename, lineno, function, line = backtrace.traceback[-1] %>
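Returning to the qualityChooser.js and addShowOptions.js hunks above: the quality preset those scripts work with is a single integer bitmask in which each allowed ("any") quality occupies a bit in the low 16 bits and each preferred ("best") quality the same bit shifted left by 16, which is why the script tests preset & value for the allowed list but preset & (value << 16) for the preferred list. The sketch below restates that split in Python under those assumptions; the helper names are ours, not the project's, and the real quality constants live in sickbeard.common rather than being plain bit positions.

    # Illustrative only: how an allowed/preferred quality pair packs into one int,
    # matching the bit tests in qualityChooser.js (and what Quality.splitQuality
    # appears to undo in the templates further below).
    def combine_quality(allowed, preferred):
        composite = 0
        for quality in allowed:
            composite |= quality            # allowed qualities: low 16 bits
        for quality in preferred:
            composite |= quality << 16      # preferred qualities: high 16 bits
        return composite

    def split_quality(composite):
        allowed = [1 << bit for bit in range(16) if composite & (1 << bit)]
        preferred = [1 << bit for bit in range(16) if (composite >> 16) & (1 << bit)]
        return allowed, preferred

    print(split_quality(combine_quality([1, 4], [4])))   # ([1, 4], [4])

This is also why a preferred quality only matches in the JavaScript after $(this).val() is shifted left by 16 before being ANDed with the preset.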
    diff --git a/gui/slick/views/IRC.mako b/gui/slick/views/IRC.mako
    index 86d94a0d0e..534bb432fd 100644
    --- a/gui/slick/views/IRC.mako
    +++ b/gui/slick/views/IRC.mako
    @@ -4,5 +4,5 @@
     from sickbeard import GIT_USERNAME
     username = ("MedusaUI|?", GIT_USERNAME)[bool(GIT_USERNAME)]
     %>
    -
    +
     
    diff --git a/gui/slick/views/addShows.mako b/gui/slick/views/addShows.mako
    index 258beb0a67..bd00ab60ad 100644
    --- a/gui/slick/views/addShows.mako
    +++ b/gui/slick/views/addShows.mako
    @@ -10,37 +10,44 @@
     % else:
         

    ${title}

    % endif + diff --git a/gui/slick/views/addShows_addExistingShow.mako b/gui/slick/views/addShows_addExistingShow.mako index a89cb43904..55621a5e89 100644 --- a/gui/slick/views/addShows_addExistingShow.mako +++ b/gui/slick/views/addShows_addExistingShow.mako @@ -3,8 +3,8 @@ import sickbeard %> <%block name="scripts"> - - + + <%block name="content"> % if not header is UNDEFINED: @@ -12,11 +12,13 @@ % else:

    ${title}

    % endif +
    +
    - +
    -
    +
    +

    Medusa can add existing shows, using the current options, by using locally stored NFO/XML metadata to eliminate user interaction. If you would rather have Medusa prompt you to customize each show, then use the checkbox below.

    +

    +
    +

    Displaying folders within these directories which aren't already added to Medusa:

    +
    -
    +
    -
    -
    +
    +
    diff --git a/gui/slick/views/addShows_newShow.mako b/gui/slick/views/addShows_newShow.mako index 3e4e9c4678..5d388a4030 100644 --- a/gui/slick/views/addShows_newShow.mako +++ b/gui/slick/views/addShows_newShow.mako @@ -4,10 +4,10 @@ from sickbeard.helpers import anon_url %> <%block name="scripts"> - - - - + + + + <%block name="content"> % if not header is UNDEFINED: @@ -15,19 +15,23 @@ % else:

    ${title}

    % endif +
    +
    -
    -
    +
    +
    Find a show on theTVDB +
    + % if use_provided_info: Show retrieved from existing metadata: ${provided_indexer_name} @@ -35,51 +39,56 @@ % else: - +    *     -

    - * This will only affect the language of the retrieved metadata file contents and episode filenames.
    - This DOES NOT allow Medusa to download non-english TV episodes!

    -

    + +

    + * This will only affect the language of the retrieved metadata file contents and episode filenames.
    + This DOES NOT allow Medusa to download non-english TV episodes!

    +

    % endif
    +
    Pick the parent folder
    % if provided_show_dir: - Pre-chosen Destination Folder: ${provided_show_dir}
    -
    + Pre-chosen Destination Folder: ${provided_show_dir}
    +
    % else: <%include file="/inc_rootDirs.mako"/> % endif
    +
    Customize options
    <%include file="/inc_addShowOptions.mako"/>
    + % for curNextDir in other_shows: % endfor
    -
    +
    +
    % if provided_show_dir: diff --git a/gui/slick/views/addShows_popularShows.mako b/gui/slick/views/addShows_popularShows.mako index 57947b9fed..6ced63bbb6 100644 --- a/gui/slick/views/addShows_popularShows.mako +++ b/gui/slick/views/addShows_popularShows.mako @@ -12,6 +12,7 @@ % else:

    ${title}

    % endif +
    Sort By: - Sort Order: + + Sort Order:
    + <% imdb_tt = [show.imdbid for show in sickbeard.showList if show.imdbid] %> -
    + +
    -% if not popular_shows: -
    + % if not popular_shows: +

    Fetching of IMDB Data failed. Are you online? Exception:

    ${imdb_exception}

    -% else: - % for cur_result in popular_shows: - % if cur_result['imdb_tt'] not in imdb_tt: - <% - cur_rating = float(cur_result.get('rating', 0)) - cur_votes = int(cur_result.get('votes', 0)) - %> + % else: + % for cur_result in popular_shows: + % if cur_result['imdb_tt'] in imdb_tt: + <% continue %> + % endif + + % if 'rating' in cur_result and cur_result['rating']: + <% cur_rating = cur_result['rating'] %> + <% cur_votes = cur_result['votes'] %> + % else: + <% cur_rating = '0' %> + <% cur_votes = '0' %> + % endif +
    +
    - ${cur_result['name'] or ' '} + ${(cur_result['name'], ' ')['' == cur_result['name']]}
    +
    -

    ${int(cur_rating*10)}%

    - $('{x} votes'.format(x=cur_votes) if cur_votes else '') +

    ${int(float(cur_rating)*10)}%

    + % if cur_votes != '0': + ${cur_votes} + % else: + ${cur_votes} votes + % endif
    - % endif - % endfor -% endif + % endfor + % endif
    -
    +
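The rating block in the addShows_popularShows.mako hunk above turns IMDB's 0-10 rating into a whole percentage for display and now defaults both rating and votes to the string '0' when IMDB returns nothing. A small Python restatement of that logic; the sample dictionaries are invented for illustration and are not data from this patch.

    def rating_display(cur_result):
        # Mirror of the template: default missing rating/votes to the string '0',
        # then convert the 0-10 rating into a whole percentage.
        if 'rating' in cur_result and cur_result['rating']:
            cur_rating = cur_result['rating']
            cur_votes = cur_result['votes']
        else:
            cur_rating = '0'
            cur_votes = '0'
        return int(float(cur_rating) * 10), cur_votes

    print(rating_display({'rating': '8.4', 'votes': '12345'}))  # (84, '12345')
    print(rating_display({}))                                   # (0, '0')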
    diff --git a/gui/slick/views/addShows_trendingShows.mako b/gui/slick/views/addShows_trendingShows.mako index f7e6518f76..d8f3bf50a6 100644 --- a/gui/slick/views/addShows_trendingShows.mako +++ b/gui/slick/views/addShows_trendingShows.mako @@ -3,8 +3,8 @@ import sickbeard %> <%block name="scripts"> - - + + <%block name="content"> % if not header is UNDEFINED: @@ -12,6 +12,7 @@ % else:

    ${title}

    % endif +
    • Manage Directories
    • @@ -23,7 +24,8 @@
      <%include file="/inc_addShowOptions.mako"/>
      -
      +
      + Sort By: - Sort Order: + + Sort Order: - Select Trakt List: + + Select Trakt List:
    -
    + +
    -
    +
    + % if traktList: % endif diff --git a/gui/slick/views/apiBuilder.mako b/gui/slick/views/apiBuilder.mako index bfa1ad63e9..74d02fc01e 100644 --- a/gui/slick/views/apiBuilder.mako +++ b/gui/slick/views/apiBuilder.mako @@ -8,23 +8,28 @@ + % if sbThemeName == "dark": % elif sbThemeName == "light": % endif + Medusa - BRANCH:[${sickbeard.BRANCH}] - ${title} + - - - - + + + + + + @@ -38,26 +43,28 @@ - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + +
    + +
    % for command in sorted(commands): @@ -115,8 +126,10 @@
    ${help['message']}
    + % if help['data']['optionalParameters'] or help['data']['requiredParameters']:

    Parameters

    + @@ -132,25 +145,32 @@ ${display_parameters_doc(help['data']['optionalParameters'], False)}
    % endif +

    Playground

    - URL: /api/${apikey}/?cmd=${command}
    + + URL: /api/${apikey}/?cmd=${command}
    + % if help['data']['requiredParameters']: - Required parameters: ${display_parameters_playground(help['data']['requiredParameters'], True, command_id)}
    + Required parameters: ${display_parameters_playground(help['data']['requiredParameters'], True, command_id)}
    % endif + % if help['data']['optionalParameters']: - Optional parameters: ${display_parameters_playground(help['data']['optionalParameters'], False, command_id)}
    + Optional parameters: ${display_parameters_playground(help['data']['optionalParameters'], False, command_id)}
    % endif -
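The playground in apiBuilder.mako builds nothing more exotic than a GET request against /api/<apikey>/?cmd=<command> with any required and optional parameters appended to the query string. A minimal sketch of the same call from Python; the host, port, API key, command and parameter names below are placeholders rather than values taken from this patch, so substitute ones your own instance reports.

    import requests

    # Placeholders only: use your server address, your API key and a command
    # listed by the API builder page.
    base_url = 'http://localhost:8081/api/{apikey}/'.format(apikey='your-api-key')
    params = {'cmd': 'some.command', 'some_parameter': 'some_value'}

    response = requests.get(base_url, params=params)
    print(response.json())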
    + +
    +
    @@ -159,15 +179,17 @@ % endfor
    + - - - + + + + <%def name="display_parameters_doc(parameters, required)"> % for parameter in parameters: @@ -195,6 +217,7 @@ var episodes = ${episodes}; % endfor + <%def name="display_parameters_playground(parameters, required, command)">
    % for parameter in parameters: @@ -203,9 +226,11 @@ var episodes = ${episodes}; allowed_values = parameter_help.get('allowedValues', '') type = parameter_help.get('type', '') %> + % if isinstance(allowed_values, list): - + % if allowed_values == [0, 1]: @@ -218,15 +243,18 @@ var episodes = ${episodes}; % elif parameter == 'indexerid': + % if 'season' in parameters: % endif + % if 'episode' in parameters: + +
    + +
    - -
    -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    -
    - -

    Look and Feel

    -

    How should the anime functions show and behave.

    -
    -
    + +
    + +
    + + +
    - -
    - - -
    -
    - -
    -
    - -
    -
    -
    +
    +
    + + + +
    + +
    + +
    + +

    Look and Feel

    +

    How should the anime functions show and behave.

    +
    +
    +
    + + +
    + +
    +
    + +

    + +
    + + +
    +
    diff --git a/gui/slick/views/config_backuprestore.mako b/gui/slick/views/config_backuprestore.mako index d2b9bc598c..e70a7a32f5 100644 --- a/gui/slick/views/config_backuprestore.mako +++ b/gui/slick/views/config_backuprestore.mako @@ -16,46 +16,61 @@ % else:

    ${title}

    % endif + <% indexer = 0 %> % if sickbeard.INDEXER_DEFAULT: <% indexer = sickbeard.INDEXER_DEFAULT %> % endif
    +
    +

    Backup

    Backup your main database file and config.

    +
    Select the folder you wish to save your backup file to: -

    - + +

    + + -
    + +
    +
    +
    +

    Restore

    Restore your main database file and config.

    +
    Select the backup file you wish to restore: -

    - + +

    + + -
    + +
    +
    @@ -64,5 +79,6 @@
    +
    diff --git a/gui/slick/views/config_general.mako b/gui/slick/views/config_general.mako index afed3c501c..d6bd1fde34 100644 --- a/gui/slick/views/config_general.mako +++ b/gui/slick/views/config_general.mako @@ -17,10 +17,12 @@ % else:

    ${title}

    % endif + <% indexer = 0 %> % if sickbeard.INDEXER_DEFAULT: <% indexer = sickbeard.INDEXER_DEFAULT %> % endif +
    @@ -30,6 +32,7 @@
  • Interface
  • Advanced Settings
  • +
    @@ -37,7 +40,9 @@

    Startup options. Indexer options. Log and show file locations.

    Some options may require a manual restart to take effect.

    +
    +
    +
    Send to trash for actions

    selected actions use trash (recycle bin) instead of the default permanent delete

    +
    +
    +
    +
    +
    +
    +
    @@ -166,58 +179,68 @@
    +
    +
    +
    + +
    + +
    +

    User Interface

    Options for visual appearance.

    +
    +
    -
    +
    +
    +
    +
    Timezone:

    display dates and times in either your timezone or the shows network timezone

    @@ -316,98 +342,113 @@
    +
    + + +
    +
    +
    +

    Web Interface

    It is recommended that you enable a username and password to secure Medusa from being tampered with remotely.

    These options require a manual restart to take effect.

    +
    +
    +
    +
    +
    +
    +
    +
    +
    @@ -417,7 +458,7 @@ @@ -426,124 +467,146 @@
    +
    + + +
    + +
    +
    +

    Advanced Settings

    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    @@ -589,27 +654,29 @@
    +
    +
    +
    +

    GitHub

    @@ -639,11 +708,11 @@ % if gh_branch: % for cur_branch in gh_branch: % if sickbeard.GIT_USERNAME and sickbeard.GIT_PASSWORD and sickbeard.DEVELOPER == 1: - + % elif sickbeard.GIT_USERNAME and sickbeard.GIT_PASSWORD and cur_branch in ['master', 'develop']: - + % elif cur_branch == 'master': - + % endif % endfor % endif @@ -654,64 +723,71 @@ % endif % if not gh_branch: -

    Error: No branches found.

    +

    Error: No branches found.

    % else:

    select branch to use (restart required)

    % endif
    +
    +
    +
    +
    + +
    +
    -
    + +
    All non-absolute folder locations are relative to ${sickbeard.DATA_DIR}
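The branch dropdown in the config_general.mako hunk above only offers a branch when the user qualifies for it: a developer with GitHub credentials can pick any branch, a user with credentials can pick master or develop, and everyone else is limited to master. Restated as a standalone Python helper under those assumptions (the function name is ours, not Medusa's):

    def selectable_branches(branches, git_username, git_password, developer):
        # Same three-way test as the template's option list.
        selectable = []
        for cur_branch in branches:
            if git_username and git_password and developer == 1:
                selectable.append(cur_branch)        # developer: every remote branch
            elif git_username and git_password and cur_branch in ['master', 'develop']:
                selectable.append(cur_branch)        # credentials only: main branches
            elif cur_branch == 'master':
                selectable.append(cur_branch)        # default: master only
        return selectable

    print(selectable_branches(['master', 'develop', 'feature/x'], 'user', 'pass', 0))
    # ['master', 'develop']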
    diff --git a/gui/slick/views/config_notifications.mako b/gui/slick/views/config_notifications.mako index 5074386ca6..94979e85eb 100644 --- a/gui/slick/views/config_notifications.mako +++ b/gui/slick/views/config_notifications.mako @@ -12,6 +12,7 @@ % else:

    ${title}

    % endif +
    @@ -21,1811 +22,1874 @@
  • Devices
  • Social
  • +
    -
    -
    - -

    KODI

    -

    A free and open source cross-platform media center and home entertainment system software with a 10-foot user interface designed for the living-room TV.

    +
    + +
    + +

    KODI

    +

    A free and open source cross-platform media center and home entertainment system software with a 10-foot user interface designed for the living-room TV.

    +
    +
    +
    +
    -
    + +
    -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - - - -
    -
    - - -
    -
    - - -
    -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    Plex Media Server

    -

    Experience your media on a visually stunning, easy to use interface on your Mac connected to your TV. Your media library has never looked this good!

    -

    For sending notifications to Plex Home Theater (PHT) clients, use the KODI notifier with port 3005.

    -
    -
    -
    -
    -
    - - - -
    -
    -
    - -
    -
    - -
    -
    -
    -
    - -
    -
    -
    - -
    -
    - -
    -
    -
    Click below to test Plex Media Server(s)
    - - -
     
    -
    -
    -
    -
    -
    -
    -
    - -
    -
    -
    -
    - -
    +
    + +
    +
    + +
    +
    + +
    +
    + +
    +
    + + + +
    +
    + + +
    +
    + + +
    +
    Click below to test.
    + + +
    + +
    + +
    + +
    +
    + +

    Plex Media Server

    +

    Experience your media on a visually stunning, easy to use interface on your Mac connected to your TV. Your media library has never looked this good!

    +

    For sending notifications to Plex Home Theater (PHT) clients, use the KODI notifier with port 3005.

    +
    +
    +
    + +
    + +
    +
    + + + +
    +
    -
    -
    +
    +
    -
    -
    +
    -
    -
    +
    +
    Click below to test Plex Media Server(s)
    + + +
     
    +
    -
    -
    Click below to test Plex Home Theater(s)
    - - -

    Note: some Plex Home Theaters do not support notifications e.g. Plexapp for Samsung TVs

    -
    -
    -
    -
    -
    -
    - -

    Emby

    -

    A home media server built using other popular open source technologies.

    +
    +
    + +
    + +
    + +
    +
    +
    -
    + +
    -
    -
    -
    - - -
    -
    - -
    -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    NMJ

    -

    The Networked Media Jukebox, or NMJ, is the official media jukebox interface made available for the Popcorn Hour 200-series.

    -
    -
    -
    -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    NMJv2

    -

    The Networked Media Jukebox, or NMJv2, is the official media jukebox interface made available for the Popcorn Hour 300 & 400-series.

    -
    -
    -
    -
    -
    - - -
    -
    - Database location +
    +
    + +
    +
    -
    -
    - -
    - -
    -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    Synology

    -

    The Synology DiskStation NAS.

    -

    Synology Indexer is the daemon running on the Synology NAS to build its media database.

    +
    + +
    +
    Click below to test Plex Home Theater(s)
    + + +

    Note: some Plex Home Theaters do not support notifications e.g. Plexapp for Samsung TVs

    +
    +
    + +
    + + +
    +
    + +

    Emby

    +

    A home media server built using other popular open source technologies.

    +
    +
    +
    +
    -
    +
    -
    -
    +
    + +
    +
    Click below to test.
    + -
    -
    -
    -
    -
    - -

    Synology Notifier

    -

    Synology Notifier is the notification system of Synology DSM

    +
    + + +
    + + +
    +
    + +

    NMJ

    +

    The Networked Media Jukebox, or NMJ, is the official media jukebox interface made available for the Popcorn Hour 200-series.

    +
    +
    +
    +
    -
    + +
    -
    -
    -
    - -
    -
    - -
    -
    - -
    - -
    -
    -
    -
    -
    - -

    pyTivo

    -

    pyTivo is both an HMO and GoBack server. This notifier will load the completed downloads to your Tivo.

    -
    -
    +
    -
    -
    -
    - - -
    -
    - - -
    -
    -
    + + +
    + +
    +
    + +

    NMJv2

    +

    The Networked Media Jukebox, or NMJv2, is the official media jukebox interface made available for the Popcorn Hour 300 & 400-series.

    +
    +
    +
    + +
    + +
    +
    + + +
    +
    + Database location + + -
    - -
    -
    -
    -
    -
    -
    -
    - -

    Growl

    -

    A cross-platform unobtrusive global notification system.

    + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    Click below to test.
    + + +
    + + +
    + + +
    +
    + +

    Synology

    +

    The Synology DiskStation NAS.

    +

    Synology Indexer is the daemon running on the Synology NAS to build its media database.

    +
    + +
    +
    + +
    -
    + +
    + +
    + +
    +
    + + +
    +
    + +

    Synology Notifier

    +

    Synology Notifier is the notification system of Synology DSM

    +
    + +
    +
    + + +
    +
    -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - - -
    -
    Click below to register and test Growl, this is required for Growl notifications to work.
    - - -
    -
    -
    -
    -
    - -

    Prowl

    -

    A Growl client for iOS.

    +
    + +
    +
    + +
    + +
    + +
    + + +
    +
    + +

    pyTivo

    +

    pyTivo is both an HMO and GoBack server. This notifier will load the completed downloads to your Tivo.

    +
    +
    +
    + + +
    + +
    +
    + + +
    +
    + + +
    +
    + + +
    + +
    + +
    +
    + +
    + + +
    +
    +
    + +

    Growl

    +

    A cross-platform unobtrusive global notification system.

    +
    +
    +
    +
    -
    + +
    -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - - - -
    -
    - - -
    -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    Libnotify

    -

    The standard desktop notification API for Linux/*nix systems. This notifier will only function if the pynotify module is installed (Ubuntu/Debian package python-notify).

    +
    + +
    +
    + +
    +
    + + +
    +
    + + + +
    +
    Click below to register and test Growl, this is required for Growl notifications to work.
    + + +
    + + +
    + + +
    +
    + +

    Prowl

    +

    A Growl client for iOS.

    +
    +
    +
    + +
    + +
    +
    + +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + + + +
    +
    + + +
    +
    Click below to test.
    + + +
    + +
    +
    + + +
    +
    + +

    Libnotify

    +

    The standard desktop notification API for Linux/*nix systems. This notifier will only function if the pynotify module is installed (Ubuntu/Debian package python-notify).

    +
    +
    +
    + +
    + +
    +
    + +
    +
    + +
    +
    + +
    +
    Click below to test.
    + + +
    + +
    +
    + + +
    +
    + +

    Pushover

    +

    Pushover makes it easy to send real-time notifications to your Android and iOS devices.

    +
    +
    +
    + +
    + +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    Click below to test.
    + + +
    + +
    +
    + +
    +
    + +

    Boxcar 2

    +

    Read your messages where and when you want them!

    +
    +
    +
    +
    -
    + +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    Click below to test.
    + + +
    + +
    +
    + +
    +
    + +

    Notify My Android

    +

    Notify My Android is a Prowl-like Android App and API that offers an easy way to send notifications from your application directly to your Android device.

    +
    +
    +
    + +
    + +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    Click below to test.
    + + +
    + +
    +
    + +
    +
    + +

    Pushalot

    +

    Pushalot is a platform for receiving custom push notifications to connected devices running Windows Phone or Windows 8.

    +
    +
    +
    +
    + +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    Click below to test.
    + + +
    + +
    +
    + +
    +
    + +

    Pushbullet

    +

    Pushbullet is a platform for receiving custom push notifications to connected devices running Android and desktop Chrome browsers.

    +
    +
    +
    + +
    + +
    +
    + +
    +
    + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    Click below to test.
    + + +
    + +
    +
    +
    +
    + +

    Free Mobile

    +

    Free Mobile is a famous French cellular network provider.
    It provides to their customer a free SMS API.

    +
    +
    +
    + +
    + +
    +
    + +
    +
    + +
    +
    + +
    -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    Pushover

    -

    Pushover makes it easy to send real-time notifications to your Android and iOS devices.

    +
    + + +
    +
    Click below to test your settings.
    + + +
    + + +
    +
    +
    + +

    Telegram

    +

    Telegram is a cloud-based instant messaging service.

    +
    +
    +
    +
    -
    + +
    -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    Boxcar 2

    -

    Read your messages where and when you want them!

    -
    -
    -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    Notify My Android

    -

    Notify My Android is a Prowl-like Android App and API that offers an easy way to send notifications from your application directly to your Android device.

    -
    -
    -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - -
    -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    Pushalot

    -

    Pushalot is a platform for receiving custom push notifications to connected devices running Windows Phone or Windows 8.

    +
    + + +
    +
    + + +
    +
    Click below to test your settings.
    + + +
    + + +
    + +
    + +
    +
    +
    + +

    Twitter

    +

    A social networking and microblogging service, enabling its users to send and read other users' messages called tweets.

    +
    +
    +
    + +
    -
    + +
    -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    Pushbullet

    -

    Pushbullet is a platform for receiving custom push notifications to connected devices running Android and desktop Chrome browsers.

    -
    -
    -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - -
    -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    Free Mobile

    -

    Free Mobile is a famous French cellular network provider.
    It provides to their customer a free SMS API.

    -
    -
    -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - -
    -
    Click below to test your settings.
    - - -
    -
    -
    -
    -
    - -

    Telegram

    -

    Telegram is a cloud-based instant messaging service.

    -
    -
    -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - -
    -
    Click below to test your settings.
    - - -
    -
    -
    -
    -
    -
    -
    - -

    Twitter

    -

    A social networking and microblogging service, enabling its users to send and read other users' messages called tweets.

    +
    + +

    + Twitter account to send Direct Messages to (must follow you) +

    +
    +
    + + +
    +
    + + +
    + +
    Click below to test.
    + + +
    + + +
    + + +
    +
    + +

    Trakt

    +

    trakt helps keep a record of what TV shows and movies you are watching. Based on your favorites, trakt recommends additional shows and movies you'll enjoy!

    +
    +
    +
    +
    -
    + +
    +
    + +

    + username of your Trakt account. +

    +
    + + +
    + +

    + PIN code to authorize Medusa to access Trakt on your behalf. +

    +
    + +
    + +

    + + Seconds to wait for Trakt API to respond. (Use 0 to wait forever) + +

    +
    -
    +
    +
    -
    -
    - -
    -
    - -
    -
    - -
    +
    -
    -
    - -

    - Twitter account to send Direct Messages to (must follow you) -

    -
    -
    - - -
    -
    - - -
    - -
    Click below to test.
    - - -
    -
    -
    -
    -
    - -

    Trakt

    -

    trakt helps keep a record of what TV shows and movies you are watching. Based on your favorites, trakt recommends additional shows and movies you'll enjoy!

    -
    -
    -
    -
    +
    +
    + -
    -
    -
    -
    +
    +
    + -

    - username of your Trakt account. -

    -
    - - -
    -
    -
    - -

    +

    remove an episode from your watchlist after it is downloaded.

    -

    +
    -
    -
    -
    -
    - -
    -
    -
    -
    -
    - - -
    -
    - -
    -
    - -
    -
    - -
    -
    - -
    -
    -
    - - -
    -
    Click below to test.
    - - -
    - -
    -
    -
    - -

    Email

    -

    Allows configuration of email notifications on a per show basis.

    -
    -
    -
    -
    +
    + + +
    +
    Click below to test.
    + + +
    + +
    + +
    +
    + +

    Email

    +

    Allows configuration of email notifications on a per show basis.

    +
    +
    +
    + +
    + +
    +
    + -
    -
    -
    - -
    -
    - -
    -
    - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - - - -
    -
    - Click below to test. -
    - - -
    -
    -
    -
    - - -

    - - + +
    + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + + + +
    + +
    Click below to test.
    + + + + + + + + + +

    + + + +
    diff --git a/gui/slick/views/config_postProcessing.mako b/gui/slick/views/config_postProcessing.mako index 29d362b93c..afc5c1dfdf 100644 --- a/gui/slick/views/config_postProcessing.mako +++ b/gui/slick/views/config_postProcessing.mako @@ -13,264 +13,871 @@ %> <%block name="content">
    - % if not header is UNDEFINED: -

    ${header}

    - % else: -

    ${title}

    - % endif -
    -
    -
    -
    - -
    -
    -

    Post-Processing

    -

    Settings that dictate how Medusa should process completed downloads.

    +% if not header is UNDEFINED: +

    ${header}

    +% else: +

    ${title}

    +% endif +
    +
    + +
    + +
    +
    +

    Post-Processing

    +

    Settings that dictate how Medusa should process completed downloads.

    +
    +
    +
    + + +
    -
    -
    - - - -
    -
    - - +
    + + + +
    +
    + + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    + + + +
    +
    + + +
    +
    + + + +
    +
    + + +
    +
    + + + +
    +
    + + +
    +
    +
    +
    +
    + +
    +

    Episode Naming

    +

    How Medusa will name and sort your episodes.

    +
    + +
    +
    + +
    + +
    +
    -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - + NOTE: Don't forget to add quality pattern. Otherwise after post-processing the episode will have UNKNOWN quality +
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - + + -
    - - +
    + +
    + +
    + +
    +

    Single-EP Sample:

    +
    +  
    -
    - - - +
    +
    + +
    +

    Multi-EP sample:

    +
    +  
    +
    +
    + +
    + + + +
    + +
    + + +
    + +
    -
    -
    - - - -
    -
    - - + +
    +
    + +
    + + +
    + +
    +

    Sample:

    +
    +   +
    +
    + +
    + +
    + + +
    + +
    - - -
    -
    - - + +
    +
    + +
    + + +
    + +
    +

    Sample:

    +
    +   +
    +
    -
    -
    -
    -
    -
    -

    Episode Naming

    -

    How Medusa will name and sort your episodes.

    + +
    + + +
    + +
    -
    + +
    -
    -
    + +
    -
    - +
    +
    -
    -
    -

    Single-EP Sample:

    + +
    +

    Single-EP Anime Sample:

    -   +  
    -
    +
    -
    -

    Multi-EP sample:

    + +
    +

    Multi-EP Anime sample:

    -   +  
    -
    +
    +
    - -
    +
    - -
    -
    -
    - -
    -
    -
    - -
    - -
    -
    -

    Sample:

    -
    -   -
    -
    -
    -
    -
    - -
    -
    -
    - -
    -
    -
    - -
    - -
    -
    -

    Sample:

    -
    -   -
    -
    -
    -
    - +
    - -
    -
    -
    - -
    -
    -
    - -
    - -
    -
    - -
    -
    -

    Single-EP Anime Sample:

    -
    -   -
    -
    -
    -
    -

    Multi-EP Anime sample:

    -
    -   -
    -
    -
    -
    - - - -
    -
    - - - -
    -
    - - - -
    -
    -
    -
    -
    -
    -
    -

    Metadata

    -

    The data associated to the data. These are files associated to a TV show in the form of images and text that, when supported, will enhance the viewing experience.

    -
    -
    -
    -
    - % for (cur_name, cur_generator) in m_dict.iteritems(): - <% cur_metadata_inst = sickbeard.metadata_provider_dict[cur_generator.name] %> - <% cur_id = cur_generator.get_id() %> -
    - + +
    +
    +
    + +
    +
    +

    Metadata

    +

    The data associated to the data. These are files associated to a TV show in the form of images and text that, when supported, will enhance the viewing experience.

    +
    + +
    +
    + + Toggle the metadata options that you wish to be created. Multiple targets may be used. +
    + + % for (cur_name, cur_generator) in m_dict.iteritems(): + <% cur_metadata_inst = sickbeard.metadata_provider_dict[cur_generator.name] %> + <% cur_id = cur_generator.get_id() %> +
    +
    -
    -
    -
    All non-absolute folder locations are relative to ${sickbeard.DATA_DIR}
    - - -
    + +
    + % endfor + +

    + +
    + +
    + +
    +
    All non-absolute folder locations are relative to ${sickbeard.DATA_DIR}
    + +
    -
    -
    +
    +
    diff --git a/gui/slick/views/config_providers.mako b/gui/slick/views/config_providers.mako index 7199302aca..3c4c5d0688 100644 --- a/gui/slick/views/config_providers.mako +++ b/gui/slick/views/config_providers.mako @@ -5,7 +5,7 @@ from sickrage.providers.GenericProvider import GenericProvider %> <%block name="scripts"> - + - - - - + + + + + <%block name="content"> <%namespace file="/inc_defs.mako" import="renderQualityPill"/> - +
    Change Show: - + - +
    +
    +

    ${show.name}

    + % if seasonResults: ##There is a special/season_0?## % if int(seasonResults[-1]["season"]) == 0: @@ -58,9 +63,10 @@ % if season_special: Display Specials: - ${('Show', 'Hide')[bool(sickbeard.DISPLAY_SHOW_SPECIALS)]} + ${('Show', 'Hide')[bool(sickbeard.DISPLAY_SHOW_SPECIALS)]} % endif +
    % if (len(seasonResults) > 14): @@ -84,30 +90,38 @@ % endfor % endif +
    % endif + +
    + % if show_message:
    ${show_message}
    % endif +
    - +
    +
    +
    % if 'rating' in show.imdb_info: - <% rating_tip = str(show.imdb_info['rating']) + " / 10" + " Stars" + "
    " + str(show.imdb_info['votes']) + " Votes" %> + <% rating_tip = str(show.imdb_info['rating']) + " / 10" + " Stars" + "
    " + str(show.imdb_info['votes']) + " Votes" %> ${show.imdb_info['rating']} % endif + % if not show.imdbid: (${show.startyear}) - ${show.runtime} minutes - % else: % if 'country_codes' in show.imdb_info: % for country in show.imdb_info['country_codes'].split('|'): - + % endfor % endif @@ -116,19 +130,23 @@ % endif ${show.imdb_info.get('runtimes') or show.runtime} minutes + - [imdb] + [imdb] % endif + - ${sickbeard.indexerApi(show.indexer).name} + ${sickbeard.indexerApi(show.indexer).name} + % if xem_numbering or xem_absolute_numbering: - [xem] + [xem] % endif
    +
      % if show.imdb_info.get('genres'): @@ -142,6 +160,7 @@ % endif
    +
    <% anyQualities, bestQualities = Quality.splitQuality(int(show.quality)) %> @@ -150,12 +169,13 @@ ${renderQualityPill(show.quality)} % else: % if anyQualities: - Allowed: ${", ".join([capture(renderQualityPill, x) for x in sorted(anyQualities)])}${("", "
    ")[bool(bestQualities)]} + Allowed: ${", ".join([capture(renderQualityPill, x) for x in sorted(anyQualities)])}${("", "
    ")[bool(bestQualities)]} % endif % if bestQualities: Preferred: ${", ".join([capture(renderQualityPill, x) for x in sorted(bestQualities)])} % endif % endif + % if show.network and show.airs: % elif show.network: @@ -168,11 +188,12 @@ % if showLoc[1]: % else: - + % endif % if all_scene_exceptions: % endif + % if require_words: % endif @@ -197,28 +218,33 @@ % endif + +
    Originally Airs: ${show.airs} ${("(invalid Timeformat) ", "")[network_timezones.test_timeformat(show.airs)]} on ${show.network}
    Location: ${showLoc[0]}
    Location: ${showLoc[0]} (Missing)
    Location: ${showLoc[0]} (Missing)
    Scene Name:${all_scene_exceptions}
    Required Words: ${require_words}
    ${', '.join(bwl.blacklist)}
    Size:${pretty_file_size(sickbeard.helpers.get_size(showLoc[0]))}
    + <% info_flag = subtitles.code_from_code(show.lang) if show.lang else '' %> - + % if sickbeard.USE_SUBTITLES: - + % endif - - - - - - - + + + + + + +
    Info Language:${show.lang}
    Info Language:${show.lang}
    Subtitles: ${(
    Subtitles: ${(
    Season Folders: ${(
    Paused: ${(
    Air-by-Date: ${(
    Sports: ${(
    Anime: ${(
    DVD Order: ${(
    Scene Numbering: ${(
    Season Folders: ${(
    Paused: ${(
    Air-by-Date: ${(
    Sports: ${(
    Anime: ${(
    DVD Order: ${(
    Scene Numbering: ${(
    +
    +
    - Change selected episodes to:
    + Change selected episodes to:
    -
    + +
    +
    <% total_snatched = epCounts[Overview.SNATCHED] + epCounts[Overview.SNATCHED_PROPER] + epCounts[Overview.SNATCHED_BEST] %> @@ -244,15 +272,17 @@
    +
    -
    -
    -
    +
    +
    +
    + <% curSeason = -1 %> <% odd = 0 %> @@ -261,31 +291,38 @@ epStr = str(epResult["season"]) + "x" + str(epResult["episode"]) if not epStr in epCats: continue + if not sickbeard.DISPLAY_SHOW_SPECIALS and int(epResult["season"]) == 0: continue + scene = False scene_anime = False if not show.air_by_date and not show.is_sports and not show.is_anime and show.is_scene: scene = True elif not show.air_by_date and not show.is_sports and show.is_anime and show.is_scene: scene_anime = True + (dfltSeas, dfltEpis, dfltAbsolute) = (0, 0, 0) if (epResult["season"], epResult["episode"]) in xem_numbering: (dfltSeas, dfltEpis) = xem_numbering[(epResult["season"], epResult["episode"])] + if epResult["absolute_number"] in xem_absolute_numbering: dfltAbsolute = xem_absolute_numbering[epResult["absolute_number"]] + if epResult["absolute_number"] in scene_absolute_numbering: scAbsolute = scene_absolute_numbering[epResult["absolute_number"]] dfltAbsNumbering = False else: scAbsolute = dfltAbsolute dfltAbsNumbering = True + if (epResult["season"], epResult["episode"]) in scene_numbering: (scSeas, scEpis) = scene_numbering[(epResult["season"], epResult["episode"])] dfltEpNumbering = False else: (scSeas, scEpis) = (dfltSeas, dfltEpis) dfltEpNumbering = True + epLoc = epResult["location"] if epLoc and show._location and epLoc.lower().startswith(show._location.lower()): epLoc = epLoc[len(show._location)+1:] @@ -317,7 +354,7 @@
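The per-episode logic in this displayShow.mako hunk decides which scene numbering to show for an episode: an explicit entry in scene_numbering wins, otherwise the XEM mapping supplies a default, and a flag records that the value is only a default so the input can be rendered differently. Pulled out of the template into plain Python for readability; the dictionary contents are illustrative, not data from this patch.

    def resolve_scene_numbering(season, episode, xem_numbering, scene_numbering):
        # Returns (scene_season, scene_episode, is_default), following the template.
        dflt_seas, dflt_epis = 0, 0
        if (season, episode) in xem_numbering:        # XEM mapping is the default
            dflt_seas, dflt_epis = xem_numbering[(season, episode)]
        if (season, episode) in scene_numbering:      # explicit override wins
            sc_seas, sc_epis = scene_numbering[(season, episode)]
            return sc_seas, sc_epis, False
        return dflt_seas, dflt_epis, True             # fall back to the default

    xem = {(1, 1): (1, 2)}     # illustrative mappings
    scene = {(1, 3): (2, 1)}
    print(resolve_scene_numbering(1, 1, xem, scene))  # (1, 2, True)
    print(resolve_scene_numbering(1, 3, xem, scene))  # (2, 1, False)

The absolute-number branch in the same hunk follows the same pattern with scene_absolute_numbering and xem_absolute_numbering.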

    ${("Specials", "Season " + str(epResult["season"]))[int(epResult["season"]) > 0]} % if not any([i for i in sql_results if epResult['season'] == i['season'] and int(i['status']) == 1]): - search + search % endif

    0]}>
    @@ -352,7 +389,7 @@ - + + @@ -473,7 +510,7 @@ @@ -486,23 +523,25 @@ % endfor

    ${("Specials", "Season " + str(epResult["season"]))[bool(int(epResult["season"]))]} % if not any([i for i in sql_results if epResult['season'] == i['season'] and int(i['status']) == 1]): - search + search % endif

    @@ -396,8 +433,8 @@ % endif -
    ${(${(${(${( <% text = str(epResult['episode']) @@ -417,7 +454,7 @@ % else: value="${str(scSeas)}x${str(scEpis)}" % endif - style="padding: 0; text-align: center; max-width: 60px;"/> + style="padding: 0; text-align: center; max-width: 60px;" autocapitalize="off" /> + style="padding: 0; text-align: center; max-width: 60px;" autocapitalize="off" /> % if epResult["description"] != "" and epResult["description"] is not None: - + % else: - + % endif ${epResult["name"]} % for flag in (epResult["subtitles"] or '').split(','): % if flag.strip(): - ${subtitles.name_from_code(flag)} + ${subtitles.name_from_code(flag)} % endif % endfor
    + + + + + diff --git a/gui/slick/views/editShow.mako b/gui/slick/views/editShow.mako index 154bad2598..6c4f1d88e9 100644 --- a/gui/slick/views/editShow.mako +++ b/gui/slick/views/editShow.mako @@ -8,44 +8,53 @@ from sickrage.helper import exceptions from sickbeard import scene_exceptions %> + <%block name="metas"> + <%block name="scripts"> - - + + % if show.is_anime: - + % endif + <%block name="content"> % if not header is UNDEFINED:

    ${header}

    % else:

    ${title}

    % endif +
    +
    +
    +

    Main Settings

    +
    +
    +
    +
    +
    +
    +
    +

    Format Settings

    +
    +
    +
    +
    +
    +
    +
    +

    Advanced Settings

    +
    +
    diff --git a/gui/slick/views/manage_failedDownloads.mako b/gui/slick/views/manage_failedDownloads.mako index 5f4aa3c274..243e22d6a7 100644 --- a/gui/slick/views/manage_failedDownloads.mako +++ b/gui/slick/views/manage_failedDownloads.mako @@ -15,21 +15,23 @@ % else:

    ${title}

    % endif +
    Limit:
    + - @@ -53,9 +55,9 @@ diff --git a/gui/slick/views/manage_manageSearches.mako b/gui/slick/views/manage_manageSearches.mako index fa4b3df4be..b4ce4d1176 100644 --- a/gui/slick/views/manage_manageSearches.mako +++ b/gui/slick/views/manage_manageSearches.mako @@ -6,7 +6,7 @@ from sickbeard.common import Quality, qualityPresets, statusStrings, qualityPresetStrings, cpu_presets %> <%block name="scripts"> - + <%block name="content">
    @@ -15,47 +15,52 @@ % else:

    ${title}

    % endif +

    Backlog Search:

    - Force - ${('pause', 'Unpause')[bool(backlogPaused)]} + Force + ${('pause', 'Unpause')[bool(backlogPaused)]} % if not backlogRunning: - Not in progress
    + Not in progress
    % else: - ${'Paused:' if backlogPaused else ''} - Currently running
    + ${('', 'Paused:')[bool(backlogPaused)]} + Currently running
    % endif -
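This hunk swaps a conditional expression for the (a, b)[bool(flag)] idiom used throughout these templates (for example ('Show', 'Hide')[bool(...)] and ('No', 'Yes')[bool(paused)]): bool() evaluates to 0 or 1, which indexes a two-element tuple, so the first element is the false case and the second the true case. A two-line illustration:

    backlogPaused = True
    print(('', 'Paused:')[bool(backlogPaused)])   # 'Paused:' - tuple indexed by 0/1
    print('Paused:' if backlogPaused else '')     # equivalent conditional expression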
    +
    +

    Daily Search:

    - Force -${('Not in progress', 'In Progress')[dailySearchStatus]}
    -
    + Force +${('Not in progress', 'In Progress')[dailySearchStatus]}
    +
    +

    Find Propers Search:

    - Force + Force % if not sickbeard.DOWNLOAD_PROPERS: - Propers search disabled
    + Propers search disabled
    % elif not findPropersStatus: - Not in progress
    + Not in progress
    % else: - In Progress
    + In Progress
    % endif -
    +
    +

    Subtitle Search:

    - Force + Force % if not sickbeard.USE_SUBTITLES: - Subtitle search disabled
    + Subtitle search disabled
    % elif not subtitlesFinderStatus: - Not in progress
    + Not in progress
    % else: - In Progress
    + In Progress
    % endif -
    +
    +

    Search Queue:

    -Backlog: ${searchQueueLength['backlog']} pending items
    -Daily: ${searchQueueLength['daily']} pending items
    -Forced: ${forcedSearchQueueLength['forced_search']} pending items
    -Manual: ${forcedSearchQueueLength['manual_search']} pending items
    -Failed: ${forcedSearchQueueLength['failed']} pending items
    +Backlog: ${searchQueueLength['backlog']} pending items
    +Daily: ${searchQueueLength['daily']} pending items
    +Forced: ${forcedSearchQueueLength['forced_search']} pending items
    +Manual: ${forcedSearchQueueLength['manual_search']} pending items
    +Failed: ${forcedSearchQueueLength['failed']} pending items
    diff --git a/gui/slick/views/manage_massEdit.mako b/gui/slick/views/manage_massEdit.mako index 2e85af0013..a2deb72897 100644 --- a/gui/slick/views/manage_massEdit.mako +++ b/gui/slick/views/manage_massEdit.mako @@ -6,40 +6,51 @@ from sickbeard.common import Quality, qualityPresets, qualityPresetStrings, statusStrings from sickrage.helper import exceptions %> + <%block name="scripts"> <% if quality_value is not None: initial_quality = int(quality_value) else: initial_quality = common.SD + anyQualities, bestQualities = common.Quality.splitQuality(initial_quality) %> - - + + + <%block name="content"> +
    +
    +
    +

    Main Settings

    - NOTE: Changing any settings marked with (*) will force a refresh of the selected shows.
    -
    + + ==> Changing any settings marked with (*) will force a refresh of the selected shows.
    +
    +
    +
    +
    % endfor @@ -71,6 +82,7 @@ +
    + + + diff --git a/gui/slick/views/manage_subtitleMissed.mako b/gui/slick/views/manage_subtitleMissed.mako index fc202ab7be..41e8b838c6 100644 --- a/gui/slick/views/manage_subtitleMissed.mako +++ b/gui/slick/views/manage_subtitleMissed.mako @@ -18,16 +18,16 @@ % if not whichSubs or (whichSubs and not ep_counts): % if whichSubs:

    All of your episodes have ${subsLanguage} subtitles.

    -
    +
    % endif - + + % if sickbeard.SUBTITLES_MULTI: Manage episodes without % else: Manage episodes without + % endif + % else: ##Strange that this is used by js but is an input outside of any form? - + % if sickbeard.SUBTITLES_MULTI:

    Episodes without ${subsLanguage} subtitles.

    % else: @@ -56,18 +57,18 @@ % endif % endfor % endif -
    +
    Download missed subtitles for selected episodes
    - - + +
    -
    +
    Release Size ProviderRemove
    +
    Remove
    <% provider = providers.getProviderClass(GenericProvider.make_id(hItem["provider"])) %> % if provider is not None: - ${provider.name} + ${provider.name} % else: - missing provider + missing provider % endif
    % for cur_indexer_id in sorted_show_ids: - + % endfor
    ${show_names[cur_indexer_id]} (${ep_counts[cur_indexer_id]}) ${show_names[cur_indexer_id]} (${ep_counts[cur_indexer_id]})
    diff --git a/gui/slick/views/manage_torrents.mako b/gui/slick/views/manage_torrents.mako index 0f49de5854..b4a95fa310 100644 --- a/gui/slick/views/manage_torrents.mako +++ b/gui/slick/views/manage_torrents.mako @@ -1,6 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%block name="scripts"> - + <%block name="content"> % if not header is UNDEFINED: @@ -8,6 +8,7 @@ % else:

    ${title}

    % endif + ${info_download_station} - + diff --git a/gui/slick/views/partials/alerts.mako b/gui/slick/views/partials/alerts.mako deleted file mode 100644 index ab0c4c1748..0000000000 --- a/gui/slick/views/partials/alerts.mako +++ /dev/null @@ -1,15 +0,0 @@ -<%! - from sickbeard import BRANCH, DEVELOPER, NEWEST_VERSION_STRING -%> - -% if BRANCH and BRANCH != 'master' and not DEVELOPER and loggedIn: - -% endif -% if NEWEST_VERSION_STRING and loggedIn: - -% endif - diff --git a/gui/slick/views/partials/footer.mako b/gui/slick/views/partials/footer.mako index c3f70f49ca..2ab04a67be 100644 --- a/gui/slick/views/partials/footer.mako +++ b/gui/slick/views/partials/footer.mako @@ -1,67 +1,62 @@ <%! - from datetime import datetime - from time import time - from contextlib2 import suppress - import os + import datetime import re - from sickbeard import ( - dailySearchScheduler as daily_search_scheduler, - backlogSearchScheduler as backlog_search_scheduler, - BRANCH, DATE_PRESET, TIME_PRESET - ) + import sickbeard from sickrage.helper.common import pretty_file_size from sickrage.show.Show import Show + from time import time - mem_usage = None - with suppress(ImportError): - from psutil import Process - from os import getpid - mem_usage = Process(getpid()).memory_info().rss - - with suppress(ImportError): - if not mem_usage: - import resource # resource module is unix only - mem_usage = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss - - stats = Show.overall_stats() - ep_downloaded = stats['episodes']['downloaded'] - ep_snatched = stats['episodes']['snatched'] - ep_total = stats['episodes']['total'] - ep_percentage = '' if ep_total == 0 else '(%s%%)' % re.sub(r'(\d+)(\.\d)\d+', r'\1\2', str((float(ep_downloaded)/float(ep_total))*100)) + # resource module is unix only + has_resource_module = True + try: + import resource + except ImportError: + has_resource_module = False +%> +<% + srRoot = sickbeard.WEB_ROOT %> - % if loggedIn:
    - - - - - - - - - - - - + + + + + + + + + + + + % endif - diff --git a/gui/slick/views/partials/header.mako b/gui/slick/views/partials/header.mako index 06f5afd9cf..595e5fe1e5 100644 --- a/gui/slick/views/partials/header.mako +++ b/gui/slick/views/partials/header.mako @@ -5,8 +5,17 @@ from sickrage.helper.common import pretty_file_size from sickrage.show.Show import Show from time import time + + # resource module is unix only + has_resource_module = True + try: + import resource + except ImportError: + has_resource_module = False +%> +<% + srRoot = sickbeard.WEB_ROOT %> - - diff --git a/gui/slick/views/partials/home/banner.mako b/gui/slick/views/partials/home/banner.mako index 95d510042f..d20da4260d 100644 --- a/gui/slick/views/partials/home/banner.mako +++ b/gui/slick/views/partials/home/banner.mako @@ -29,7 +29,7 @@ - Add ${('Show', 'Anime')[curListType == 'Anime']} + Add ${('Show', 'Anime')[curListType == 'Anime']}       @@ -75,32 +75,42 @@ cur_total = 0 show_size = 0 download_stat_tip = '' + if curShow.indexerid in show_stat: cur_airs_next = show_stat[curShow.indexerid]['ep_airs_next'] cur_airs_prev = show_stat[curShow.indexerid]['ep_airs_prev'] + cur_snatched = show_stat[curShow.indexerid]['ep_snatched'] if not cur_snatched: cur_snatched = 0 + cur_downloaded = show_stat[curShow.indexerid]['ep_downloaded'] if not cur_downloaded: cur_downloaded = 0 + cur_total = show_stat[curShow.indexerid]['ep_total'] if not cur_total: cur_total = 0 + show_size = show_stat[curShow.indexerid]['show_size'] + download_stat = str(cur_downloaded) download_stat_tip = "Downloaded: " + str(cur_downloaded) + if cur_snatched: download_stat = download_stat + "+" + str(cur_snatched) download_stat_tip = download_stat_tip + " " + "Snatched: " + str(cur_snatched) + download_stat = download_stat + " / " + str(cur_total) download_stat_tip = download_stat_tip + " " + "Total: " + str(cur_total) + nom = cur_downloaded if cur_total: den = cur_total else: den = 1 download_stat_tip = "Unaired" + progressbar_percent = nom * 100 / den %> @@ -116,6 +126,7 @@ % else: % endif + % if cur_airs_prev: <% airDate = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(cur_airs_prev, curShow.airs, curShow.network)) %> % try: @@ -128,34 +139,41 @@ % else: % endif + ${curShow.name} + % if curShow.network: - ${curShow.network} + ${curShow.network} ${curShow.network} % else: - No Network + No Network No Network % endif + ${renderQualityPill(curShow.quality, showTitle=True)} + ## This first span is used for sorting and is never displayed to user ${download_stat}
    ${download_stat} + ${pretty_file_size(show_size)} + <% paused = int(curShow.paused) == 0 and curShow.status == 'Continuing' %> - ${('No', 'Yes')[bool(paused)]} + ${('No', 'Yes')[bool(paused)]} + <% display_status = curShow.status diff --git a/gui/slick/views/partials/home/poster.mako b/gui/slick/views/partials/home/poster.mako index 6d669675fa..23ec7be384 100644 --- a/gui/slick/views/partials/home/poster.mako +++ b/gui/slick/views/partials/home/poster.mako @@ -20,15 +20,18 @@ % for curLoadingShow in sickbeard.showQueueScheduler.action.loadingShowList: % if curLoadingShow.show is None:
    - +
    Loading... (${curLoadingShow.show_name})
    + % endif % endfor + <% myShowList.sort(lambda x, y: cmp(x.name, y.name)) %> % for curShow in myShowList: + <% cur_airs_next = '' cur_snatched = 0 @@ -36,36 +39,47 @@ cur_total = 0 download_stat_tip = '' display_status = curShow.status + if None is not display_status: if re.search(r'(?i)(?:new|returning)\s*series', curShow.status): display_status = 'Continuing' elif re.search(r'(?i)(?:nded)', curShow.status): display_status = 'Ended' + if curShow.indexerid in show_stat: cur_airs_next = show_stat[curShow.indexerid]['ep_airs_next'] + cur_snatched = show_stat[curShow.indexerid]['ep_snatched'] if not cur_snatched: cur_snatched = 0 + cur_downloaded = show_stat[curShow.indexerid]['ep_downloaded'] if not cur_downloaded: cur_downloaded = 0 + cur_total = show_stat[curShow.indexerid]['ep_total'] if not cur_total: cur_total = 0 + download_stat = str(cur_downloaded) download_stat_tip = "Downloaded: " + str(cur_downloaded) + if cur_snatched: download_stat = download_stat + "+" + str(cur_snatched) download_stat_tip = download_stat_tip + " " + "Snatched: " + str(cur_snatched) + download_stat = download_stat + " / " + str(cur_total) download_stat_tip = download_stat_tip + " " + "Total: " + str(cur_total) + nom = cur_downloaded if cur_total: den = cur_total else: den = 1 download_stat_tip = "Unaired" + progressbar_percent = nom * 100 / den + data_date = '6000000000.0' if cur_airs_next: data_date = calendar.timegm(sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(cur_airs_next, curShow.airs, curShow.network)).timetuple()) @@ -79,12 +93,15 @@ %>
    - +
    +
    +
    ${curShow.name}
    +
    % if cur_airs_next: <% ldatetime = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(cur_airs_next, curShow.airs, curShow.network)) %> @@ -109,26 +126,31 @@ ${output_html} % endif
    +
    + +
    ${download_stat} % if curShow.network: - ${curShow.network} + ${curShow.network} % else: - No Network + No Network % endif ${renderQualityPill(curShow.quality, showTitle=True, overrideClass="show-quality")}
    +
    + % endfor
    diff --git a/gui/slick/views/partials/home/simple.mako b/gui/slick/views/partials/home/simple.mako index c3d8baf3f7..8fa68617b7 100644 --- a/gui/slick/views/partials/home/simple.mako +++ b/gui/slick/views/partials/home/simple.mako @@ -29,7 +29,7 @@ - Add ${('Show', 'Anime')[curListType == 'Anime']} + Add ${('Show', 'Anime')[curListType == 'Anime']}       @@ -75,32 +75,42 @@ cur_total = 0 show_size = 0 download_stat_tip = '' + if curShow.indexerid in show_stat: cur_airs_next = show_stat[curShow.indexerid]['ep_airs_next'] cur_airs_prev = show_stat[curShow.indexerid]['ep_airs_prev'] + cur_snatched = show_stat[curShow.indexerid]['ep_snatched'] if not cur_snatched: cur_snatched = 0 + cur_downloaded = show_stat[curShow.indexerid]['ep_downloaded'] if not cur_downloaded: cur_downloaded = 0 + cur_total = show_stat[curShow.indexerid]['ep_total'] if not cur_total: cur_total = 0 + show_size = show_stat[curShow.indexerid]['show_size'] + download_stat = str(cur_downloaded) download_stat_tip = "Downloaded: " + str(cur_downloaded) + if cur_snatched: download_stat = download_stat + "+" + str(cur_snatched) download_stat_tip = download_stat_tip + " " + "Snatched: " + str(cur_snatched) + download_stat = download_stat + " / " + str(cur_total) download_stat_tip = download_stat_tip + " " + "Total: " + str(cur_total) + nom = cur_downloaded if cur_total: den = cur_total else: den = 1 download_stat_tip = "Unaired" + progressbar_percent = nom * 100 / den %> @@ -116,6 +126,7 @@ % else: % endif + % if cur_airs_prev: <% airDate = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(cur_airs_prev, curShow.airs, curShow.network)) %> % try: @@ -128,22 +139,29 @@ % else: % endif - ${curShow.name} + + ${curShow.name} + ${curShow.network} + ${renderQualityPill(curShow.quality, showTitle=True)} + ## This first span is used for sorting and is never displayed to user ${download_stat}
    ${download_stat} + ${pretty_file_size(show_size)} + <% paused = int(curShow.paused) == 0 and curShow.status == 'Continuing' %> - ${('No', 'Yes')[bool(paused)]} + ${('No', 'Yes')[bool(paused)]} + <% display_status = curShow.status diff --git a/gui/slick/views/partials/home/small.mako b/gui/slick/views/partials/home/small.mako index aa4f621c21..83769c636e 100644 --- a/gui/slick/views/partials/home/small.mako +++ b/gui/slick/views/partials/home/small.mako @@ -29,7 +29,7 @@ - Add ${('Show', 'Anime')[curListType == 'Anime']} + Add ${('Show', 'Anime')[curListType == 'Anime']}       @@ -75,32 +75,42 @@ cur_total = 0 show_size = 0 download_stat_tip = '' + if curShow.indexerid in show_stat: cur_airs_next = show_stat[curShow.indexerid]['ep_airs_next'] cur_airs_prev = show_stat[curShow.indexerid]['ep_airs_prev'] + cur_snatched = show_stat[curShow.indexerid]['ep_snatched'] if not cur_snatched: cur_snatched = 0 + cur_downloaded = show_stat[curShow.indexerid]['ep_downloaded'] if not cur_downloaded: cur_downloaded = 0 + cur_total = show_stat[curShow.indexerid]['ep_total'] if not cur_total: cur_total = 0 + show_size = show_stat[curShow.indexerid]['show_size'] + download_stat = str(cur_downloaded) download_stat_tip = "Downloaded: " + str(cur_downloaded) + if cur_snatched: download_stat = download_stat + "+" + str(cur_snatched) download_stat_tip = download_stat_tip + " " + "Snatched: " + str(cur_snatched) + download_stat = download_stat + " / " + str(cur_total) download_stat_tip = download_stat_tip + " " + "Total: " + str(cur_total) + nom = cur_downloaded if cur_total: den = cur_total else: den = 1 download_stat_tip = "Unaired" + progressbar_percent = nom * 100 / den %> @@ -116,6 +126,7 @@ % else: % endif + % if cur_airs_prev: <% airDate = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(cur_airs_prev, curShow.airs, curShow.network)) %> % try: @@ -128,35 +139,42 @@ % else: % endif + + % if curShow.network: - ${curShow.network} + ${curShow.network} ${curShow.network} % else: - No Network + No Network No Network % endif + ${renderQualityPill(curShow.quality, showTitle=True)} + ## This first span is used for sorting and is never displayed to user ${download_stat}
    ${download_stat} + ${pretty_file_size(show_size)} + <% paused = int(curShow.paused) == 0 and curShow.status == 'Continuing' %> - ${('No', 'Yes')[bool(paused)]} + ${('No', 'Yes')[bool(paused)]} + <% display_status = curShow.status diff --git a/gui/slick/views/partials/submenu.mako b/gui/slick/views/partials/submenu.mako deleted file mode 100644 index 38277286c9..0000000000 --- a/gui/slick/views/partials/submenu.mako +++ /dev/null @@ -1,26 +0,0 @@ - - - diff --git a/gui/slick/views/restart.mako b/gui/slick/views/restart.mako index bea94e2127..8e91bbef85 100644 --- a/gui/slick/views/restart.mako +++ b/gui/slick/views/restart.mako @@ -5,6 +5,7 @@ import sickbeard <%block name="metas"> + <%block name="css"> + <%block name="content"> <% try: @@ -23,19 +25,22 @@ except NameError:
    Waiting for Medusa to shut down: - - + +
    + + + diff --git a/gui/slick/views/schedule.mako b/gui/slick/views/schedule.mako index 9457c19ece..7ff0fe2a29 100644 --- a/gui/slick/views/schedule.mako +++ b/gui/slick/views/schedule.mako @@ -8,8 +8,8 @@ import re %> <%block name="scripts"> - - + + <%block name="css"> <%block name="content"> +
    -
    - % if not header is UNDEFINED: -

    ${header}

    - % else: -

    ${title}

    - % endif -
    -
    -
    - - -
    - -
    - - -
    - -
    - - -
    - -
    -
    -
    +
    + % if not header is UNDEFINED: +

    ${header}

    + % else: +

    ${title}

    + % endif +
    + +
    +
    + + +
    + +
    + + + +
    + +
    + + + +
    + +
    +
    +
    -
    +
     ${logLines}
     
    -
    +
    diff --git a/setup.py b/setup.py index cb60307c32..a5111937b9 100644 --- a/setup.py +++ b/setup.py @@ -23,9 +23,6 @@ 'rednose', 'mock', ], - extras_require={ - 'system-stats': ['psutil'], - }, classifiers=[ 'Development Status :: ???', 'Intended Audience :: Developers', diff --git a/sickbeard/server/web/home/add_shows.py b/sickbeard/server/web/home/add_shows.py index 82af9f3081..8a6e4a40b0 100644 --- a/sickbeard/server/web/home/add_shows.py +++ b/sickbeard/server/web/home/add_shows.py @@ -418,7 +418,7 @@ def addShowByID(self, indexer_id, show_name, indexer='TVDB', which_series=None, if Show.find(sickbeard.showList, int(indexer_id)): return - # Sanitize the paramater allowed_qualities and preferred_qualities. As these would normally be passed as lists + # Sanitize the paramater anyQualities and bestQualities. As these would normally be passed as lists if any_qualities: any_qualities = any_qualities.split(',') else: diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py index 6c0d024950..58cb884b4c 100644 --- a/sickbeard/show_name_helpers.py +++ b/sickbeard/show_name_helpers.py @@ -53,9 +53,6 @@ def containsAtLeastOneWord(name, words): Returns: False if the name doesn't contain any word of words list, or the found word from the list. """ - if not (name and words): - return False - if isinstance(words, string_types): words = words.split(',') items = [(re.compile(r'(^|[\W_])%s($|[\W_])' % word.strip(), re.I), word.strip()) for word in words] @@ -66,10 +63,9 @@ def containsAtLeastOneWord(name, words): # If word is a regex like "dub(bed)?" or "sub(bed|ed|pack|s)" # then return just the matched word: "dub" and not full regex if word in resultFilters: - return subs_word.replace(".", "") + return subs_word.replace(".","") else: return word - return False diff --git a/sickrage/helper/common.py b/sickrage/helper/common.py index 6927bbbba0..aba7f8b972 100644 --- a/sickrage/helper/common.py +++ b/sickrage/helper/common.py @@ -332,7 +332,6 @@ def episode_num(season=None, episode=None, **kwargs): if not (season and episode) and (season or episode): return '{0:0>3}'.format(season or episode) - def enabled_providers(search_type): """ Return providers based on search type: daily, backlog and manualsearch @@ -342,7 +341,6 @@ def enabled_providers(search_type): hasattr(x, 'enable_{}'.format(search_type)) and getattr(x, 'enable_{}'.format(search_type))] - def remove_strings(old_string, unwanted_strings): """ Return string removing all unwanted strings on it @@ -358,3 +356,5 @@ def remove_strings(old_string, unwanted_strings): for item in unwanted_strings: old_string = old_string.replace(item, '') return old_string + + From 188519250ccc7482eaf936be10c6882a067f128c Mon Sep 17 00:00:00 2001 From: medariox Date: Tue, 28 Jun 2016 17:16:44 +0200 Subject: [PATCH 118/134] Remove .pyd file from markupsafe lib --- lib/markupsafe/_speedups.pyd | Bin 8704 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 lib/markupsafe/_speedups.pyd diff --git a/lib/markupsafe/_speedups.pyd b/lib/markupsafe/_speedups.pyd deleted file mode 100644 index 266e39910664a0bf86c20be513f7b6ac61723e23..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8704 zcmeHMe{@sVeZP_{V;hVdf`UnKc*Zu+0Y1idhCPMM!Zuu`&(y&N93T}!dd3Q}{<`1$z4w0KS5m{Kz0AfK%RtvO#ts5ftI?nT_2UBAGv^viK2_M<7K}+9(a83wuU+!{!r_Q2ZBwLZJS+vnQr+j8r1nTaDb3BzDK<_2 z;ESUNM(1@8r~Ac#Wq1VRi-CKFFYEN(!>9Cd=J2m{IA{2h4rl9dUC`e~x}W4}V?ASm zzp%6K&5|~zWyaV7X}UEV&Vw@Bpz^tZc{;w!LV$29V|IF=r6Jpj0R;=q;227#E~s~o 
z4a)vSjI9N?vWT%pQckxT#_IG5O#+yU)4bmWN{sQzcJ9zERh6A8Xv=f)AQx%v<~o4s zO=4_oX*A$deT;oH4I~g)7WypoNv#IQqtp;%Wh7UIPPXnvpVVp?>n-gt1obt7pa<># zNkTwDt|^Twp@^Tc2Z==iqTovClUfa9t0oOUMgI>XP}9V_E=%2MvydAj=2rkK#Jf$pgCKHd=HW9S3SN1l~4L z&A9?oa52QYd*hkB>!`H3pC_FQ$)Y%>ISYx~Wz%@V{wh@BIb76cadARxEISCFz$1;5 zMypxB!QJd$@7_RRQhS$Og)uE^q=EMAetm9@#@hk5dWul}2+Ip)_f<~U$&#k6*h z>uhXcYuET6$L@6;=;h)oua1?|{IAHiKHh!Y@zq}0mf~GUtfQwIy066V_Gp=^ZT1ZCcVJI&-&BZ^r?sdap zHVpEvOS-|UFvyclR>ZuJjA5;RIakv(E@-V-zD(a_4?pFec)`=2XE3K=@jz-#0lEXGR+wWi|gC2 zmQ|e5kMAE4scRZmJml#98Yzj5nEbT1u8f?;qu)5ensXXNWwANS?Ku2*FFezXv>NvE@bK<+aD^%chl>WZZg zkX1BiayqT;XjQ*fvH+-7QjDjOQx7rhtL7{s32_YJ)SM2&6ZRL#X2Sjr0^R51_GGPP zw1{^VvD9lua=QhP`g;R^7xSdP2^{HI--VMWxj4!bEm=I#md6u2^X0@=i87FTBjun9 zT%ZfG7;&9*bXUOxVYg!**MQ@hB8;?Sb>eEm{yu`HaFJ0`#>l&ttkFE~>R0bQl+sr< zCx@mR;>!!ugJpg1PPU7mvBXtGkkr}BY2nZr& zHv{#33|#~23?ohz*uc?;aFDK(E6Loq0S4?{kfE@qW`gY+i0_8Lj}3tr0aDkn8w2*o zAcd5~TvzG(4DhGz8$sdKE876nLRj+xNj(K&On6s=IC_}GJ>rz+tOrN4+we4AUC4qp zT!e}_UA;zG1n^40sd9VitwOh9;%s&3XUpA;GgamhW8$VPe)VH=HqnwwIR6%! ztZL_XQOw12D$avE@*3EPn`-a6X^mHKVV_Bh!A;822E|YUx2F5NdN)$KIEURyRU((9 z$&TGgC{M7QSd78y(k$@@@K#$6Ud0(w`z=;CF&IP2kww9Gun!F|K0VEFYj1RGa744k zS4>##hwzP|`;Hf#5Wh&s#S*W{HW$rC{+#rb?sTSwyx6mWTk?zGcL?dob?a)0Q5IDj=BMZpEYH<|>-a8_Y#whN| z+#r(6$aRPs`GbpJ;E+EFqUOv*mCX@9=!|Q47gLgcDicTQ&kw`GeectfKcz)$~y? zKI8CoxZYa*NWSB_-e32hl>P1pkw<xGy#T#$eY4%cxb)?}(aKGLCGZmE6>yvyou6l6&;$K~Ts(9jZLuwekm` zvUsNE^n#HtP0hIH8*vx*TlbJspty|jpO*K$v+Ns7Fs8)BZww+C4ud`V1W(FTOb2{1ej_jx# zk`o1C%;vfA2CxvhrC%m}%dzS+Fr<|n)t@ix&lm8lS&tP|=YT3btqU&VoIxUm-{(YQ_L5Za#$oy~bVYaog^K;uCSNy7XjiS0$HQJ;`gW>Ir)mj=mJ?$NlsOmL9 zLnIInDeO8l)`x?Bd@;SgwyE4(rx5CHs`Yr-EZbDx>S(0hlw}{lo;g#kFBDoG5Bt?% zBn*-}x;@5N$!$;4Dr_%n?5vMQy|p357iHs9Ef~gFW`?)1)4MhjR^WNuk1uT<&{`FV zguKn29SWNZt<|B3Pc?>YBdsf(9zBJT90_kXnCs!c*Aot^##$@Ae^|*nI@PvFxO_!v zAQVE%$Y@vEV+v*&Bh25?i9)(*L+!fiGF@PW#jAuVAl{BhFsv$3w$|!x$9f}vuRp4? zpIN*?*axGXoxY8G74~9AYgADft4do4c*Ai_xy<4X=pml4c#(O1?Xm4%WoHn6Jj%S@ zT5qgF@dsOjes7yE90*}rwu$H+9WDM2b#*XAp8Odoenc|tRS_8Y(HilFeeFsNDEMI? 
z*7v;48xOD)2y_5b?UIyYf^eMnnH@w!7j48V9d8Iq%^lh6<^dEo&Y|~ zO`Z2h{m^>RhIu~+Hbk^U{?B614lw{#!E z_$~Ac=y@o9#sC?kP%43VN367?Gr(xGCMe)8EUKt<8L(Q1zV;3shExJ<;dTSI`=UEw z!`F%(ZP0Pz!n(IjZ4Y_9gex(>4+nHgGiZxrpOKb5BpF6>a%v;tCzPlvscnknZ}UZc zer#7vTDVktaH;grQfbjr3A-*WpdXE5`t_A!k{XeCbA!B$e8`C@e-g*JRAQx)v_7W5 z5`K6Jwo09mxD>{6NjeaWb%cDK68-W40iPH z7?Yxk8jpshCw!r}BKcyb_ohRo<|>W#d1KhIFb=O719N=GvgP0oMKTnmZE@WZ3PVhy z%T6Bq!jjV7p>|H1BNk~@af0>qL{m;4FOKMmP7kdSrktDxALj6-qh=~nWP+*`jD;7e zk}pKZx)aN6rkH#I5%Q&X#tb%MNx$47P~;z?xy|R_K@OVs(u;>ba+y)z;Qhag&LMpq zrMA_i*DN>a`)5z(H=6W)CcJP5f4@mLtbOwH_PH#be601tpNjwVN1juUNoQKm(npg| z+ms04$Bt^$hF>p4%>tONDG5^DxF7V%^&{1hSMYn_Tc8u%gt~7G_~iPJ>c?B?Sqluf zbT+n#%zlWz1^ABub5ZLBNCvPTb=)4{^jmE=`cdG| z0oI|mI}4oPW^@U4*A~F%(65sW;7Jp2ypOTJLT8ZK5BLT;$)5#$8uj5~;8bV!qwj() z)tFQ_5>C*Hx{z?H50{(xlz9Gs8%T`d>u|R9+j!vHU@Vc<^zfdRo~=D?Jv)1L_3Y{S NdJl-7w%^ML{0B>`@Z Date: Tue, 28 Jun 2016 17:18:32 +0200 Subject: [PATCH 119/134] Add pyd to gitignore --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index f0a536fae5..4bad0c62da 100644 --- a/.gitignore +++ b/.gitignore @@ -25,7 +25,7 @@ server.key # Compiled source # ###################### -*.py[co] +*.py[cod] # IDE specific # ###################### From d9c47da62681129508ce476b891966225e7ed8e3 Mon Sep 17 00:00:00 2001 From: medariox Date: Tue, 28 Jun 2016 17:36:39 +0200 Subject: [PATCH 120/134] Remove all .pyd files --- lib/sqlalchemy/cprocessors.pyd | Bin 11776 -> 0 bytes lib/sqlalchemy/cresultproxy.pyd | Bin 13824 -> 0 bytes lib/sqlalchemy/cutils.pyd | Bin 8192 -> 0 bytes lib/tornado/speedups.pyd | Bin 6656 -> 0 bytes 4 files changed, 0 insertions(+), 0 deletions(-) delete mode 100644 lib/sqlalchemy/cprocessors.pyd delete mode 100644 lib/sqlalchemy/cresultproxy.pyd delete mode 100644 lib/sqlalchemy/cutils.pyd delete mode 100644 lib/tornado/speedups.pyd diff --git a/lib/sqlalchemy/cprocessors.pyd b/lib/sqlalchemy/cprocessors.pyd deleted file mode 100644 index f10b2386c5fae88273edbd3f51087a80642455b4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 11776 zcmeHNe|%KcmA^@t5Prl7KL(>1eMtBiS4qaJzX z|2-R9ao_bfTeMMju{0xd8e_9TIp3uA769gPe0>H1!ZR7O(F>0SSp*#l4jz+Z4iaNQ zP4Al&se-Wva5t7Q=BB~cgN|uX5L)1`#rr(4gt3RY<7XaJtSKEDSoh4ri(I6+i+KR! 
zkHlC@vFi6~UdB{}he0Qsjy4Z1<*7nC6q{m9CO!yUP6AAMsu=4pj+lacj+iSC+Fzvz z0fk&uv8n{aKF01PRtp*h_j0t9r;4#_Q@UTFUqA%p_^BdUx79CTOs*V?*=2*FdkuBt zdyDqS`hLSvj$T(kc)INhVnBp=@v#g}yL1B0h|Mq@7ZM3W&Xx7t^9k>9{O#8O^l64; zHu`1#T*JZBAJ`a>APHY)n7%xPF8LuFi~b&H+7QV9aH9OZ5Fz*mPO1&Y{un5ZvJQEqA*LtK^>h3)eHQ=08Xc zXyruPNHlaxXpoz*BEp&oleiDRy^yi^SoS(McQL>{;x6`bw%c^^hXi^Yn^Rm|CS0_4 z`3%E2-A$M7ICd3dH98E}=rx5+b^5hAdQ)LD2ynUvPV2T=qy%_51Udjq^bLF-K#PEq z`#>e9(NJQBHFXL3dXSa_n1i_<%@~ln9#0Y{zPC!O$Gy}|uE$zFEM+}>oRGVf0M=tM zfLM=j66s9qL7L5_a22-EdXyxC+yWg(Nk=l)cVXj*k7cbh)8SL}L^?dpV|{`Xau=EA z|0%`%Ys4Y*dRm-e9&wIdX~wyk;@pBb=ONB*h%=l)AQ7SMq(DTn+zd!+Bu$*hvjo!Q zC*qls*8%Rp-VzbVtEroe;}R~O636+Rkh_{dk0Uo_BD08erZ|#j#8IZac2gXi&9L=2 z&f&xHwJg1WM^7`O_bc>7^nS^sw~Z5We@mdpk-&anE#9L&gY*#Pqo}TVa9Mg2}LCJuc_Qr zUkT^_N?6Z-gtQBe=tonQ^dV9a-zy=55N=e9royT^{YAu?JE`Sxqr}#$^IZB3`4pdW z8PTb#i}y=)@d3$joR?IRZamfgWoNQKFF903VX*Js)Q1pV5S=L-nX>-;89m#A0~jWK zR-WArSyM^UGZBw?XCC5xRGTmBPLyp_SAcjqqguO9)~j+*YvuS|`HUp0^SJ~-x`bJm zxe3|WRF!es_F~2#neaTUQC>^P%hi`svQT={2C)CU)O_ISNGvlS@CBIpAPIt$T;43HuYR_b;GU*56NA?CnIsV&A%F7aT>Nt}QG#9A#W?jK1U5cR5a)a<}d7%jo-hAGEH1`^G0ISi0>sQ{;C3 ziQ67UMKT;O36bwZ1fTz3;{c5HZKk72Pwscnt1WwxhY+8?^oJ}OvW3)`2Y?0$t%gw9 zVZ-qtkqz4+o*K3n0HEk*(x)GQM?V5)IPNxu5^xXJ-UNWq^`?;F@N&(x81cR3RJ6=k z?qXPNoHKAJ=ea#>INl??+txuNyKMmiSV>z?b;ig-Ilho3_T!`v=RO8VY%}q<;eFx} z;8saEcu>~&%H7R5a(7#v+})Ay>TZ#oE1fqvZ>AJ1GwSu@F8yTX5qnoTJkV`N5thn9 z`wte9Kl^I*_quH_fTYO!ltpj>-ZkfpCE;=fbONo@UE$R&))Sm zIeFjGSI|Fs1bA8h$$tZgf0SYG8U&Jzlm6c6k@~y2$w)l_1&EYQ1nMqyD+jfiW}GUq zfFsu+Yd#@YdUD?Z7_>D)hQgYd2X^IPYy$-Lm;w}!#2>L5gSN$xqF-5+@6ne$DBowh z1QfYqX*qzl0NTC=B=LO+!{DrXeeZ4(uhS114hJ}f?Fc@aon(kU|0-Dz7Rporu;Kbq z@lcwa9_OvjZ&B_hb1TAg%h`mSDyh>)^^aWoo4+6(-OY(^hXRdVhhwuV9CrbWO)onJ z^2z;RBW{{4{!wNO$86g?nhfn4#*jWtN1(1_+8meuf_?<6lUVYpVOV7Qh8_$~vI;O6 zlNC2IVppI@ejU8B!!#@dykuwe6T@)n1y5id>Y;pw#c&$0bQy5O$ckN+)Vc@49}drd zap3{|C>;#73*e)Qq2wd>S<^4e*O6?@YIUJBXp7Rmk4Hy|+q=V~4|NaBms z3iJ3^Sf`KS45z=3XvA!ZDAvQJ59&zamqZ2D%!2PZssG6^DDFME6|i#h6Np-8hoi`N z7e8|R0Ji}As4mCH@I#INTzCq!k-yoMgRmurF{&5R7jo&Aq_mzpg;SOb3f3OKh1#g6 z@1w1-ijE%Ti3c#AiX#pf*qUT6ZTmIVIXZF}jw?;dGoY9TZ{Y*>Q10OHh~J`#>*$9F zg4_S$u(1#HqCE$x0_X5>$UOz~=RjM}(zyU5=g4{%g*?+3lH+@FB<^#+MlVKTWf!&Is5t^7t60jVqK`;IMY>mSe>prbF!Xy zRSv0kr!gSM2a0Md52$aE1$`jp#7Z%7=>s^THXJt$ob3|dMu0Hqi|a2NsMCjtvXMdt zl3lMV97tUB;ZXc|#z-cw^N60Ys5uX5x9A`Edjo&p!QX9olYUAroSv5cGu;)%vJJ;n zFwE~f8UK7U{y3M=))w~{mN5BOrJLVa{1aMTDp2x=IBnRrf<1Bv9jkHdc-amfofNP? z0&lK9j@}gfeiDInxy6iw;KG=4SJ^RnCuW8Rl!e3c&N5-nRr!X?UNh|KE+~ez zx({Cgb~(AlKPtZTnDxC-WGvG7yOqD2@vd5l8B4PMU$Xvdxx2nd)-&{+?)t*SyZC-6 z4SDq%&w!--pzU9Z%&*s!N;1quegJNsVfewM7z~xYcuzU@xqkbD13Lbh;t2Vd`TVBb zwB9e^4gnt$@Vf%40-h4k^11IS6KfIl1_5OO3k94l;73CKlz^`ZxL?5M1$>$As)mmi<&_35TkH z4Qc&S(NBk#Txn*@0$V1tX;WG6xs2^bD=B4c!OSUa=8|k2CerA1@eCRRFIArVKsLy? 
zH5gAiA=97F*v0t8vVnAB8max{n1Y$CVDAi8piO55P19Jxk{ni0dfw={#~njL#(kb6 zhdID^_@^-k_>P8I{r`>QCJHu-<32&bMaHOP!gc81j=n?&@nqPuncY8?+2N19bVg(r z#SOmFcsjkz!39qzT7@M)eF~cn+3BU(k!d2-RWskV+w}Fd>UhRPFba>VT*D@hP$&aqZ(UebaH6F%hLMUT*jO()WV_0M0yWmYdO(SD}zzoEUs=dMBvRKHc1;QbaU^~j#x^aDC zO3#79-GOJm+Zo#i4fSDvEU0*#e!r=Uv1?$f+8c=yxvo7DRuPMpSy+O)P(U-qmw2vR z!d@V4oe{;;pm_bA_@4@^f1MIi)PQffawm&)YHi_A=~c!4U=aTSX^TejU!$$~q0x9# zbwfo7CtsD}Q9={~Pb3@&X^P60XL{N(qp;88gYlO#JONk+qn*7xj(ZgL^C_*Wg6Nc6 z8vLG642BkGc>Fx5NsVtYkGDN~n@8ygU^d@m9#6F=8c}?K)_~8`<_-CSFw0gEJrZg5 zMYLssAbD~ZD88WL4S6)0X&}@Z_Jq9cN)#ygAundNHOmtV!P(nc7o;&!WF47Vn(5)x z6PX@C>7UXVRFnvNi`4|8-Zep`sZB-fT!A&JS4GT&!6@6Fbz@9XJKc)fN_iOaDb?W^ zkIEyN*C|?4z;`<*Fm@mV{c8G1LYk?l4qwY)3e~NKeM&S6@_HKE*csK7_NG9);*4T` z3SsPq+J@z|uCh{dh0^P(^?%6?sJB%6ttO-ILUW_h9<>ILF(3Z_n+%54XmO;|&uH0F zXz9wo+>_LC%5>1NGXD#7B zdld~4l%l+tNtY}tE-fjEULq})?x1=m;qL+Hj{v+v0M`iSvuH_d@uE`RB*3dhD_(SE zjIC&7?6d7$Etz&sll$Dhq}xujo1$;rCe0vCj^gmSVvsU_9+1iPO|;941Z^956xu9M zWUfi3ub8)V`IX;zq31;zI3{kdw(up`)W)-@H=BYt(}!$I|MylZ+br^ ze{duTU76Wp@@h^0Buh|0cm5X}$24=vu9ZLK!mlLbERmtJjPGRX zB(YwT@&B*%iTfO{UnpDHnCLIlH03r$m9#LnDX|eI_bJplA=fGDB*ai4tzxg@P^?68 z`0!GZ zg8s08qiOs;K|e0w`q>lhj|zH`Y|@A3OyrjfI-mS!yv&~lk}r&mz{wBmTW#%+QQ9C8 z6EFRRB%Kh6+bVvrL`BA>Y+BrkwWi6%^;A3Eb(8H|lhq~j;*zdXtt;C@-;xfuzKW|= zT=lL)OW=fum@#cOAaKNo{c;ZWYv70lJ24mTl!6D?>p)!vjy;^^&cmM<0+#@PhL#T; zdmzihosdLyz;$Rvz*Fyj=+0;utsHcMb8&ul0VjApnjbi7A=`nr7I^9%5Zw(uh;~2d z1b>3I2{`V?*hgqPfPVtG5_dW~fj0y8;BM!2;PiK&htTrN&g)O>EqWdAj2{s6v?t$(R zIQz06+#k(^4?7;is|v9K*!S4bW0B1rn>TLm-Td(8zRl%Z+*^jWBo(YS&mi6_M?kG_b~%Dpk90Ki~WA zn@oOC@$5gl=lJx!@4esq{dK?hd%xfNUWTT-x3g@**(W|KCr)#o^%T-*Y&?;o5+2Bk^9y(&h%n z{NHf0=11 zz)tO3n60-m){4Ii{mq@}A8@)Opo`}!iEMLhjgnwye3oL==9$eEO5&?UOurVOnCVvm z$V|Tspp5ASiUAX#Oc+ye4~a@jK`wf>De*o;#`6`UE>AIzLLN*{vkY06$9SU!b8b_N z*Bg^7Q;P9?WADmgG*7ypq3wX)5%gq;z8|8|?MHW{$vBASem;qx$mAIPONv46GBGoP z#y3m1fnVvZD>9{TaFRGlso@3|bOWOpu=A&AB#xMvM%z7@QMbiKQbdzn2Pwsh;k=8O zmb&xw*$|WuL1RBxOb6*xXgQieCWEXxlPsIaz9qM~glv7Y zoQxbbrM_#Zah}#`j$#Bcy~Kd59;$j74xlBkFeQUX60=M!JGuNWxcnt^ZH3ZvB7Ujm z_lgWoC0Xz&3GdZQt4--HE`A*1^?RZAdt5ET<7O^17c$S2%sV0TIG1Vai$J%H(;YLV zSwsihO347CT*0+d08I^mvCE-a1*69}>M{Bi<26${iJY$(u!N!`)5^)N;k{al76e^a z5m;VFP#W$c#e_P5L^UJjM<$||hpmCNBMN}&6o_cZz_M`)2%aVb!A=xq7Dd@K3M;y4 zLGDq^owW|eGR>ydDZb0y>Rv~7yNwZ#aY`}%Xi9%uf@n5|JVwftzKyo0CZ(4vM!`IC zMpCN3iLrW`%IpT?m!=d4g|&*!q{)4+`^)Mfij~@)6MA8zfkn6*B#Mz5o6CExR!`|U zOBrMIIO9@pgH*%?ExTe$)trWw-PL`H2(as22X^j7^NLumV%%3emZK!^D{e%9PYldz zGW?|&X-o`YPc%#nfMW~i*izh>IP2Kl;JT(3Q=D6>)D--BK4bdjiZQRDs^64eLXU!N zwbDClY_4Kt^Ie;bo{~6}2f=wtVi54Gqw6^(k;_ABIacsEm6f24q%m|oFhfJkS zG$5NHBpSecj3=P6=QCGlXh<>E$coWe47)~~|JyVtj}8rqcoS6a{a9X>S`0!fy;9o5 z!Xt5DAAXJY;ZLu^+z|F+&$K}EQQ9-cpkjc)BA?Jgx)pO#54a|f87+#RJX+`?TC|Bs zQjFX48mdk!#)n7{?xbGoP->hn;b0KI9_7Q7o<-9bNt`!bJ%?elDg6@=e5Id87nA)G z3OZQ2iRD4E!jvLJ!mEb^dZ3ch6968hG3ouZWv=d8(=_Xi-yNU> z3Hesq2YzfLKDmV$Z0I?yBN=ljH#Ebv3LDczSLH${t@lol)xC!!!I{UAc6 zIDF5lqFh_`20ZSf84Gwu{pMR!>3KQ62*zASmM5j3Tl!4tR`luEF;UDBl+V$qRgwp| znUtI3^Ee$ZkLk(}QBQGEcKPondZa(hpLX6vsXn2XAzt{@Q+UTnoOkH+VGpNIN?UAu z9tS`4jD2xhuo?|Tuma~ks*p_xjHQeYh;DvBG-oNjTZ(M2Sv5Ru~+qf(WSRr=gH^6LH0sl z+TD%v4ED;OU@uN8WN+E0vRAk4?ZoRS_wPTTG1b3Ll}2rQzhc{4mSHcBN7MX^`1>>d zO`O58&)InyBqs}QqvYG`4BtS>c@1zq{hJ$650>9Z!;%R+s5Yb5Q1!xKeX0Bhd@C`f zYls_8yIL`wA6UA%z?=%6)XN{gx8>)c9A zz0Ugy;P~uJ);Y!&Dv5J=L(laK43E)eO46wUF#NQg6J|>pTvWrWiSk=#5`7 z*5{2CDK&@mE1ya6`I?uIx^czzlac{Y#@`;xb^ij~5cW9wiSt>y>lvKMS?V4qW1~OB z5Mps3hD>SRrBp3XD$v;dcU*eGHa_qngfTBL{^}J0+KCY79ER04h3+)VxEPcTjo6c{4D|(q**(`T|J11&!3hAcn$Anv4N{ zcHyr$h8Ana_7$rSW z)CA#k>;bp=N{QiJM;0aV$Lr+}PKj$%ETc!{FME 
z{qYx^dV8AMD45GJUl$md4$hs_GmePQKdq(JslNd~wi110!0Y-}PVIc1r)*DYtto{p z%yYn43R^hCA&MQmoZy>MY6bkGz`6Y&jF?A&M|sFNYb8tGb3sLN*%^SbLV6`bSY(;Q z^g^c8ycs=f`Gs79_s(>UPL>?C_y#V5k4mdz9DJtfZpJ#6=IPm0{f88va;uVkq^kd+ zrSxeyKKH;JnBK8;O|k2-{`U?Id3^3$k?TFi%bx6mZr9Vbmo>PaUY~nM(!+`y5Kvbe zYvy`tUXJGDK$88ur)D_na+?R0#KE$LnxoNEq`^2y0U$2&g~vGP!S|0hOtaB=A@vnF z26Mf(>4t-i#xP;Fk*k1m8MUPcQ`dYjoH*$i%i{SRUNaYs&&8TAI>n#Q^5>8Fb0?l8 zpMnbKq-Ed6s<3#jDXqYOwZGGT7vFdc=PQed$zNK*l;$H$?#bW zk3FE^b0FR=U8I1DsVTiWhu*tJ8mms2(r`w52yLRSwg(RZ;Q>#zr|N{VA2Y)R%)$|6 zf0a<@sd>}msvq(6mMn+1#t&XWtDel_*@!PWW_iiAGOz}A9NlowRSB9>Ai>216BY(3Nx*N30`WY$)Kp} zg@>x0_@?c8?4W`FEONN~|MRs3f0v-^Y5@xcJXd4kM+6)e@D4F!0!@phky?V z=oio<;B5llDByenFB33Vzz@G_P@5OAA-Q2|rJ-Zs&<{eB_nFJac- z&n`r80lXu}!E$bAY=JY2IhWx0gM-vx;@(yX936M2;2e2bEbpnD3P*kp%WutN`IWg5 zi#L}soRunZ_Y?2j95&ZK3;dJuj@&GkTbWhiD9mMrOY&J^W&UtpzmNfW!#JAXh+{sh z5;pl~^S%aL5#caI;}d<@3>r3t*Ku#eUqavsv+yJXTzJ+3=OmnmbWHu<_8vTPre%w;75b6AO<&q`oh z32ZB=ykh)wC#7LQ(|m=L$0UqPn4^SoY3(KbKU-)WL|4j;-2{g`i3Cfq0sSh#ljzgs zaOE-=>?C_#m2)DO2))Gb!1xK!y02yIk2ZaVA7+cVuxU4dX9sQ>^@5-sH`ctZI7oeg ztL2X&?7M!v)ojeb4Iqcq0#}xS^HW{}?n2jRdD8OCXL-`{!^P&we`d{H4biCDqUkMq zG!WXr&Jbp^PhA@i>48oSgW)J+?>m~iR=0me^XY1xHyCWv^o?OZn*~~Tbc5RLjmEUr zcqFJHBLY_+09|WWL}Q!4=Ly6_?+-#Y4OxO$Wi@xTX!pmpkWW+VHfp|2jLjBO*M%Zp z-zH1!lI-R#_4arm=)cn&jBBh8avM7%;i#@!pegK+L;d;5c3LL0+0Y`aU!m!4U5|>% zvOi3wUmg$n^guWSV$3VXSZ^_GL6ImmzhdY|J+mh21D}iYY z>8d;C31eYdoaFFD21+2C#^hH9N$-SNnYN zXjJnv)=Bok4%?iE?LKE{5w+Zn`wDSiw1m{01>fy6C;hW#!yW6A|P2Gtg-@2b^HAy znAkds#l}!Tw}zIeUtGctgP$g%uGPH$F2)Yo{S_M5L$MI)!lnsT-cs%l1{u4>p(5z< zb0akp4uoi-S7oW4*jr(r>Wk{^xI+y<2L?LXOA~EXV~^)_L^bTRYFmO|4aFhyCWq?R zCP_TSRBvZ&gQ{%~Aol)>scM}Xi)g+;N5H3U^oIOFim$r~ABn8@MfBx?AX)NNV0`d# zNY!ahflxAB^90(X-e?!4mKfWYy(X?jyPCCV2kjaxL|quk2zKqr z!g{p^e499ev7-+3>nOlNx+SO)kv5otZH|V0S}X?iR^n~xiedA#BCok)n5#xDwz6UE zss>M0rL`{8E3Ez7tfx?e={#WHd(nR!_mjBC=VQ$Q8S_Q8SUjlXS7w{L$|GHVMyr^K zoUY(A%mqELdNpJC9}yPP9#mD{CSp~MX*zbPt^t{GW#VOok9@l~rmYP>h&b5XCCB3J zzMwZ2(_(UGJf_Q#AwNL%B*tkYUJ9feBGR88vxh6gwL?INmTH-QT{V16jQ`HTV<`ug?=nW#Nb#|3&p$&nMh99C) zcnLA2pGTS+#m-0Cq8de+lJ+kqns!dvsa`h$pt0T-G-tr=Y^CaQK|TeqW>qn|mS%|3ix3BmsX|5|cf zOt;d!3Za}C4|ivKU_(3{kIBLeN<;GEMdg(h6|u$it4+%vw3G{EJ=!HNj>j}L=xx`6 z7!UKhAcsR359075^N{JpDwkUtkVt}lnSKX~q=jg*)d;sx1PR5qPQhpWc+?N6VA40j zCM$Xt%U-+a5M}0kXIj6td^K^L^49a;HMY(u$y%(5XrZ60S|y5Ba;gl{TQ}W4cy)?# zpyT=`k4#mT5!Exwoe1^m;w||zQcP3BcgGBNTU%v{Y;O0ok!do;u0kAItHY-HYIQV# zUF8k(O`uyRSn{7bVKa3Z8AraRdYG{lczjb{SgCc%dRQ(?v)za#@@t!EQ}K<>rQjQ4 zz4iTJ=M@&jJM&hmC(Qyc$k;izg+F=Y@0&2S@gGy@h1h`Hid4d=-18{as=Ev zh0fOhNzlrg4F1&P(GcGNB9TV52(~14nC6$;;S~DGb!OI-y-27s`IEMwip1rQ*c2zmVi&P2m;BHpKtkKwx-zgAqF{_RuNsc4Gc;ZGUY z<3$&zcdFeN+mCrWG^%UJ51YHD7HbH}DBoCPdo3r@Jg>h8Mq8J%={FXS3&Ve~k73V2O->Hn9YA1YEZO-w@HJe>?IW+rj9njO}P8f4g&7N{TR^zz9HIe3mN+%?w!E*1HO%$=uZQFw-oV*Hhoup1-Fbg zeN(0Hq|_#ODZYbJo4$v-MH{oc_^X03{FRS2{|Jv-%mrY0*T}AuyHdN(?_#_2b{FlI zcbDz1*j>F_+1hm|tM%Fu0_v3-Eln&kHnf5ySSY}tX! z*iYskS-#Js9a-MFEuK+R>EzaQq*skb5{aa#ZZXufl~CgewdM8>wKo|vYOAU${jTY! z?!W%o$*XTYQ|zZ>!)GQiJ{^1M%vniacjg5-K6vIe312?*YYBfL;hpj5HqxCNX}FcK z*q6O5{LrWGD)P>=Yt;oM^AR)!?ofZS7_eI6J3RylFJa6}AG9K5n=zmyp*b8!$<+mQ z?%1J(uVajZ8(YCxn3RjH31jKdC4hN#k@rEM%9uLa&K%uZ)95on+q)1S3X$gR&I6ci zDr1{#)3JydVeH%UKmvhPpkIzYt2Lo4Y8^4QjeORjQ!GHQ?rI2qW_N>;MubcTs+V7n!hWwpOVjWntv6I!#uD2 z+cJlsaxvO2hHT|g)X>17H7_v8vj-c7xga^=9RQiLIVMg<0M7JX1qcj77pk&Z)%3Uw3XL)%U0ZB5? 
znSO37KFK?e2&|8hP0GNGJ&3%==uC`9ZDky11Y)$-T`IndY-P)BE6T+>#QLI)^-{Jq zF+!7n(N-S#K!y?YA2vKQ3od@f1^ISr5^Hh;Tv74dF6+it{4^HMqWpkLF|RDDArjVP zs!Fb3BWUv_4LLX?Y1Yd3q|YvO-JT<;nCmOwiJ-5bhlvGR-Sva)+RpZal}6?p*bSYDK70a)Y-Q&xWA{m8 zvvN0faijA{%X@F6aP!J}cMp$^adD96 zx+-{XTQ$%1E!J|I)zF5}XF_+8+ko9JPHAEy_=<0!0TBrA%Lq6);(KHT#WUI>&gHz% zgJf_~wz5v1fL#@n)jT+CF5O>{Q~6;fxjWB&&(!RLr1kn_dFY>^4P%dCoSiB$efx0= zvx4qrY+^51TvUAzEL$0Yt|m@#c-;L>X}xNPWc&z>cJVt+oadszvu{?k1<$58^2Ym_ z`IfCb3@&eMW#$`Pw8P9!8dJR_3CXoG`vhG4rL+{2ysDKLNRdvAwcxv!R|}puzpsgR zHSzl7x7x&*ty~Y5t*n+dyl>-8fV}?}Ah-4vd|QI&d;_mjkPodrit)rNzypI5F9T%X z!9hF%WI9jc92avlwF@sdH&c(m05auu6Ll|!!4Y$zlc$@pfRmp@^=zh4@>OjBBi`E~ zLuoBs4t8+F+6{s09D&6E1uCo&@AZ(v@ymke=$sqj4|!LB!W-AF0Wg=t+G-$$FGCm( z7qyFn2S~h49I=%};Mm?1OYn#$NA6bbaZcN?CSthsvIo_W9{POfb11Eu+DggV3U#8U z>e|G4@s1{5`zNxI>nh|F0~QC4TbDN~_X4vj11CYAcmZtW&Gcs9DX{{$S-*(%(1}-J z9~0v_Y?XnN=0`N~Gw}*mr?BStwp}FqcITly&02!VIIOJgv2Mhr@d@w($4Tw?nBDZ? zJoc_8Y8Js!JB%+VGD7x=0ULqXrPj^UR)-;cVf>?qRvZ?;#EU{TuSSf&z^jsCtS*Wb zW1TF=tTy@85pM<$avM#!XM{Y~aSYP&R2SML^9D)mNYh^}cVU~D692-*TgZmxEhJ^c zBLXG-A}s+(-X#@=X$p7adv;afR^*2xxq}zR?NQKI4Uf^|t3sxO z=W8CI5b|sLDC*0&D5aE_*vEKwxPk{?O~2{ng;Fwnk=|X7Uig2ZS3I@37~0>yk+JT^ z3bQmYILxE`7G63U7~IuJzk*o{4=h0V-HmrF_B}fIAH!o>G;|A&a!nl9N{2$eXPd5Q z^*!5F{^`8N7Pq3Pt`T=E)Pl#;m3Y6EKCcCjrF|iLh-ZgZwFVETPm>2RL`C2>%7-R~ zw0u?l8@Amh4i)Y}axmX(+gA^@iDN{0n9>E3Pc-?53d=4W%bxN~mdG<6nX^}J#=&Z@ z+#VaaMD)#X5l7@K)RvG+zx_P4gpYvS%DlR;tBG>LOyotR-B`8m>V)n$|zt|K+gaEkaF2=fx^ zGx~58IJBz)DIfGbIwbJph$H16wm)75uD#B&z0igK`7vkw?=Bp3;nOaB!iB|%zTu9) z;=&y+{P9Xh;-CwUyYN>oJnO=@U0CMoUFkx<3vY4ZdKac$SoFWg)!*U5dtA8Dg_;XL z>B1XaxYUKrg}?DT7ROy!wEuo^@Cvq*Zs-r%JY^nMwuQ0f-V)|rg9`<}$KHmXS`ONk zW$a3`oUys|<>hR747cv3cntDHC;nW$|1%EvYIC%6~muK_k;OwHpfXTI1x=34_;zaX{9^-KD+rZ?l6^*!{@7Ue6;SS4gD>&sK~ zY5hLMSW$KakG^-%cS4q+oD29KsZ$|!`exPC7=L9MiKwC9YM5=N(ThKG&Zc(6zi6<_K)HR(Jx0{jc_b4u z&2)#EjwiM6ASdx~Xo7R^@R$iuu z`}Ot7gaHp$6n~VR5BX4ftG*$TibS_dUu;7ugxV5uvppHJdW<#tO>0=H-`ti=)Zbhi z>*+ytdUQ+*e@53+$#}vv(rkT+-W%z`pHcN_+GHgv=w(M|aNS@}mUX8MgRzF9 zg_xeOVDjT0J!V{F@ifyTy_v1L(HBRYpJlqf0iTSP$H?jYLnSx}qs_ zUA%`Pxet_Rj}b}eW`D{+9NkGh5$QEDK*3K$5dY()x|Kk%+t~o5F(qV#+^j9pCG|{+ z?o!%2KGS?>W1XD#U|C0xVWik;))LP|w)7aC+tNlPrp33UBkBHTOh3c^zVr^uNcV@0 zbayh{8^OvqCoP$w@0OqhI^)splEBzu55~<_I&CCO$52}gzy8glhSSNYk;#C(lXN@! 
zGbo_Wc&`!4V6FyX?6%fB*SBhcdS_i`c53l|4i91v(|J{cn+akF3Twh~d3hzOvVa`~2)&uPYrPxm9AQ;&DQL{IEL z(>Ck09e~f^KHmqN z{(tut`rE)6?(C=0%W#hp96=8Q9|ffUQTge91#EEfLAtMT=huO*0yd+Qd<)^G1k)}~cmG}&|C6f{l`;G=l{M_erw;Q4*z?%8AA904ARo3r`waXy D4d3^w diff --git a/lib/tornado/speedups.pyd b/lib/tornado/speedups.pyd deleted file mode 100644 index 120974439e511b576ccfa9c88c8646eba68931ec..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6656 zcmeHLe{dXC9e;P13)iISrP77)BlOx_X5w_n#9Xuqm*5>q;6*nWngF3KY_4~k=4^6z z-tB3l*ePBk9B)o5cGNODfPg#j(eW6fTacq(u(Rj!a3^|%U*Y4;G2jt44q5`Mk`j_4{&rCa- zCv*K|=g{O7#wR;pot)A5O_OhF<9(ASG(0x>riMo~To?5BaNET)ZEGeZ@O3MBs@QR7 zPG^>^b`+YI5K;inQk^@y0$8lk+f5ueZ6?IZA80uwT^R6Uq3IIc$glbY+>LpF(wc*iuF7b@r}zkY z-3k^MBp-bl`n=}B@~G6+NQU#P(0MU<4d*ovA?eD9uBgos3{CHfc}gI`_EbjY-mqVH zr4@+#D|wIJO$#}fM*nLCXkvy^HcV5ZvrELImL^4K1t0c^sU~L?A3A7CIJy#Rm+e=D zZ)5Qcq~n*l(loJ`j!@R&>~bBT$&F5rDr|$0vJ8bn0cBtrIOjG;h-I&#NOJGF;6?R9eW7&JLbjNY(KJB zg>P~?Y5fUzlCO*NXP%V?sEWDjEJ!2NkZ_QyJX3eK|+gVqY;*f26SM%iAP z?6lEjPccnyEfJGl4)5*WJG^)Dur9TgO^9sDeat>kiwv06ia733`!~vXKKq;4nWXjY z>j;r4%PZ$QjIvJKbTM_0C|B>vYE#*ajp<6yQv0tvtr)^ zuO#cocAk0+BFc(hf=CrMz*b}v6cKlPNAq6vD@||{1g-3X$Yv?a(!^<7i+d)zp4NSp zC~v94wUE-fW}^I=vR1fR!N;@RXrV;?xhlf1vdg7MTb(602= zvsXau9*ysS!c)4!l%6h$|`u1Kx7|;G6F7d zWqY6H>MiWBDl|i)S}k}E@<96HX1z}A&3m0~A`3V>v1T1!$$O9YZY-^fYb&CyEpG?b zR8)aJ>Vzji#S2`=!Jaw@G3usR6YrSgF3PO=d@=)R z#iow4OcGmV;J9+R$o8>gcstpef2wMZZQqfC@tmj>lhH*9)fE3E_WdUya~fiUOQt!su_*f{4rz)J>}oq3!?*_CI<6BDLsvv$s-dTMzIJ|)%i zxABJ6@{Jn3OnVmN$^G-iRZJ5Nk6=vV`Ks^)RP}v6SN{^d{>(a!j~i_y#>8?6?ptJ;9WdTe-l0<^35c_+FWHST}n## z7Lrn>fRuJDA*IkSt-fmZ$_ZiI&}q9$un_@rA+Ur9kPGXUr#rAo;GLYhoRB}@|E)Ic zOOAiS_z-YA#=M#A`NST$jM!^{)rFDeMqF;szYbyoA+MqD&gmEAkpk!zROd&Q@VVVY zNKW@M*xZ1ghAu~KE|8lv8=Gd+Ij`OmpwkXQGOKwedduLwc^rg%Zys+eUrXTqr}qD{ zF5~95{&poA3~iRYF`XdyUF6*y4fp9>Zz#|n{IX2Q7c5d+zc;#BYV$>7az{MUD@$C` zL2kFS^-C?Gpwb!+#Czo$=~Fc%(y#P{L)B|51HHY3tTjn;$d8|3q)0dzQsgMP)hzY- zdhxHf&3On&p*UP#Z;}G?0*@z&SeC!&1oC zC&xfS9`Ygo$1PGkgk-mn0cc}N;gA-l!7ORqNwZ{dTHEhvTvu16O*@^}-Yd%ya*{L! 
zW4=wjaz{^8_65Y?rl>F4--zkQ$WJY|#pP&!n;h*9NBevszuXv(YZZFfe2c7f1pQky z26mYVFoDkXcL3Mo z{;3De@4OiLF8Jj49lygk&GAWt=6BTzgZ}V~_7U>vFn$7m40Ec|e;Rf?TJdP@qaUF; KUYg3H75F!0k`>$l From a2b746c9aeb65a39319152bbbae0debd7b590d1b Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 30 Jun 2016 16:01:17 +0200 Subject: [PATCH 121/134] Fix freshon.tv login, add proper strings (#722) --- sickbeard/providers/freshontv.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index ba412d0750..3bdf658db0 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -51,13 +51,14 @@ def __init__(self): self.url = 'https://freshon.tv' self.urls = { 'base_url': self.url, - 'login': urljoin(self.url, 'login.php'), + 'login': urljoin(self.url, 'login.php?action=makelogin'), 'detail': urljoin(self.url, 'details.php?id=%s'), 'search': urljoin(self.url, 'browse.php?incldead=%s&words=0&cat=0&search=%s'), 'download': urljoin(self.url, 'download.php?id=%s&type=torrent'), } # Proper Strings + self.proper_strings = ['PROPER', 'REPACK', 'REAL'] # Miscellaneous Options self.freeleech = False @@ -210,8 +211,6 @@ def login(self): login_params = { 'username': self.username, 'password': self.password, - 'login': 'submit', - 'action': 'makelogin', } if self._uid and self._hash: From 9b0e7afa3baef8e2565dd6f73b92a1676b54ebd2 Mon Sep 17 00:00:00 2001 From: p0ps Date: Fri, 1 Jul 2016 14:49:32 +0200 Subject: [PATCH 122/134] Update lib simpleanidb (#726) --- lib/simpleanidb/__init__.py | 118 +++++++++++++++++++++--------------- lib/simpleanidb/models.py | 94 ++++++++++++++-------------- 2 files changed, 117 insertions(+), 95 deletions(-) diff --git a/lib/simpleanidb/__init__.py b/lib/simpleanidb/__init__.py index 95829751dd..d8223b305a 100644 --- a/lib/simpleanidb/__init__.py +++ b/lib/simpleanidb/__init__.py @@ -1,73 +1,75 @@ -from __future__ import absolute_import +from __future__ import absolute_import, unicode_literals + import os import xml.etree.cElementTree as etree +import xml.etree.ElementTree as ET from datetime import datetime, timedelta import tempfile import getpass from appdirs import user_cache_dir -import requests from simpleanidb.helper import download_file from simpleanidb.models import Anime from simpleanidb.exceptions import GeneralError -import xml.etree.ElementTree as ET -__version__ = "0.1.0" -__author__ = "Dennis Lutter" +import requests + +__version__ = '0.1.0' +__author__ = 'Dennis Lutter' # Get this file directly from anidb batch import api -ANIME_TITLES_URL = "http://anidb.net/api/anime-titles.xml.gz" +ANIME_TITLES_URL = 'http://anidb.net/api/anime-titles.xml.gz' # Get this file from ScudLee's managed anidb lists -ANIME_LIST_URL = "https://raw.githubusercontent.com/ScudLee/anime-lists/master/anime-list.xml" +ANIME_LIST_URL = 'https://raw.githubusercontent.com/ScudLee/anime-lists/master/anime-list.xml' # Url for the anidb http api -ANIDB_URL = "http://api.anidb.net:9001/httpapi" +ANIDB_URL = 'http://api.anidb.net:9001/httpapi' # Request list Types -REQUEST_CATEGORY_LIST = "categorylist" -REQUEST_RANDOM_RECOMMENDATION = "randomrecommendtion" -REQUEST_HOT = "hotanime" +REQUEST_CATEGORY_LIST = 'categorylist' +REQUEST_RANDOM_RECOMMENDATION = 'randomrecommendtion' +REQUEST_HOT = 'hotanime' class Anidb(object): def __init__(self, session=None, cache_dir=None, auto_download=True, lang=None): # pylint: disable=too-many-arguments if not cache_dir: - self._cache_dir = user_cache_dir("simpleanidb", 
appauthor="simpleanidb") # appauthor is requered on windows + self._cache_dir = user_cache_dir('simpleanidb', appauthor='simpleanidb') # appauthor is requered on windows if not os.path.isdir(self._cache_dir): os.makedirs(self._cache_dir) else: self._cache_dir = cache_dir if not os.path.isdir(self._cache_dir): - raise ValueError("'%s' does not exist" % self._cache_dir) + raise ValueError('{0} does not exist'.format(self._cache_dir)) elif not os.access(self._cache_dir, os.W_OK): - raise IOError("'%s' is not writable" % self._cache_dir) + raise IOError('{0} is not writable'.format(self._cache_dir)) self.session = session or requests.Session() self.session.headers.setdefault('user-agent', 'simpleanidb/{0}.{1}.{2}'.format(*__version__)) self.anime_titles_path = os.path.join( - self._cache_dir, "anime-titles.xml.gz") + self._cache_dir, 'anime-titles.xml.gz') self.anime_list_path = os.path.join( - self._cache_dir, "anime-list.xml.gz") + self._cache_dir, 'anime-list.xml.gz') self.auto_download = auto_download self._xml_titles = self._xml = None self._xml_list = None self.lang = lang if not lang: - self.lang = "en" + self.lang = 'en' def _get_temp_dir(self): """Returns the system temp dir""" if hasattr(os, 'getuid'): - uid = "u%d" % (os.getuid()) - path = os.path.join(tempfile.gettempdir(), "simpleanidb-%s" % (uid)) + uid = 'u{0}'.format(os.getuid()) + path = os.path.join(tempfile.gettempdir(), 'simpleanidb-{0}'.format(uid)) else: # For Windows try: uid = getpass.getuser() - path = os.path.join(tempfile.gettempdir(), "simpleanidb-%s" % (uid)) + path = os.path.join(tempfile.gettempdir(), 'simpleanidb-{0}'.format(uid)) except ImportError: - path = os.path.join(tempfile.gettempdir(), "simpleanidb") + path = os.path.join(tempfile.gettempdir(), 'simpleanidb') # Create the directory if not os.path.exists(path): @@ -89,38 +91,56 @@ def _load_xml(self, url): return xml def search(self, term=None, autoload=False, aid=None, tvdbid=None): - if not self._xml_list: - self._xml_list = self._load_xml(ANIME_LIST_URL) + anime_ids = [] if not self._xml_titles: self._xml_titles = self._load_xml(ANIME_TITLES_URL) - anime_ids = [] if term: - for anime in self._xml_titles.findall("anime"): + for anime in self._xml_titles.findall('anime'): term = term.lower() - for title in anime.findall("title"): + for title in anime.findall('title'): if term in title.text.lower(): - anime_ids.append((int(anime.get("aid")), anime)) - break - else: - if aid: - for anime in self._xml_list.findall("anime"): - if aid == int(anime.attrib.get('anidbid')): - anime_ids.append((int(anime.attrib.get('anidbid')), anime)) + anime_ids.append((int(anime.get('aid')), anime)) break - elif tvdbid: - for anime in self._xml_list.findall("anime"): - try: - if tvdbid == int(anime.attrib.get('tvdbid')): - anime_ids.append((int(anime.attrib.get('anidbid')), anime)) - break - except: - continue + elif aid: + anime = self._xml_titles.find(".//anime[@aid='{aid}']".format(aid=aid)) + anime_ids.append((aid, anime)) + + elif tvdbid: + list_aids = self.tvdb_id_to_aid(tvdbid) + for aid in list_aids: + anime_ids.append((aid, self._xml_titles.find(".//anime[@aid='{aid}']".format(aid=aid)))) return [Anime(self, aid, autoload, xml_node) for aid, xml_node in anime_ids] + def aid_to_tvdb_id(self, aid): + """ + Tranlates an aid (anidb.info anime id) to tvdbid (thetvdb.com). + :param aid: The aid in int or string + :return: One tvdbid as string, or None. 
+ """ + + if not self._xml_list: + self._xml_list = self._load_xml(ANIME_LIST_URL) + + anime = self._xml_list.find(".//anime[@anidbid='{aid}']".format(aid=aid)) + return anime.attrib.get('tvdbid') if anime else None + + def tvdb_id_to_aid(self, tvdbid): + """ + Tranlates a tvdbid to aid (anidb.info anime id) + :param tvdbid: The tvdbid in int or string + :return: A list of matched aid's, as one show on tvdb can be matched to multiple on anidb.info, due to season mappings. + """ + if not self._xml_list: + self._xml_list = self._load_xml(ANIME_LIST_URL) + + return [anime_xml.get('anidbid') for anime_xml in + self._xml_list.findall(".//anime[@tvdbid='{tvdbid}']". + format(tvdbid=tvdbid))] + def anime(self, aid): return Anime(self, aid) @@ -144,18 +164,18 @@ def get_list(self, request_type): @return: A list of Anime objects. """ params = { - "request": "anime", - "client": "adbahttp", - "clientver": 100, - "protover": 1, - "request": request_type + 'request': 'anime', + 'client': 'adbahttp', + 'clientver': 100, + 'protover': 1, + 'request': request_type } self._get_url(ANIDB_URL, params=params) anime_ids = [] - for anime in self._xml.findall("anime"): - anime_ids.append((int(anime.get("id")), anime)) + for anime in self._xml.findall('anime'): + anime_ids.append((int(anime.get('id')), anime)) return [Anime(self, aid, False, xml_node) for aid, xml_node in anime_ids] @@ -168,7 +188,7 @@ def _get_url(self, url, params=None): r.raise_for_status() - self._xml = ET.fromstring(r.text.encode("UTF-8")) + self._xml = ET.fromstring(r.content) if self._xml.tag == 'error': raise GeneralError(self._xml.text) diff --git a/lib/simpleanidb/models.py b/lib/simpleanidb/models.py index 7af01b33a0..57614a8f08 100644 --- a/lib/simpleanidb/models.py +++ b/lib/simpleanidb/models.py @@ -1,4 +1,6 @@ -from __future__ import absolute_import +from __future__ import absolute_import, unicode_literals + +from simpleanidb.helper import date_to_date class Anime(object): # pylint: disable=too-many-instance-attributes @@ -29,7 +31,7 @@ def __init__(self, anidb, aid, auto_load=True, xml=None): self.load() def __repr__(self): - return "".format(self.aid, self.loaded) + return ''.format(self.aid, self.loaded) @property def loaded(self): @@ -42,57 +44,57 @@ def load(self): http://api.anidb.net:9001/httpapi?request=anime&client={str}&clientver={int}&protover=1&aid={int} """ params = { - "request": "anime", - "client": "adbahttp", - "clientver": 100, - "protover": 1, - "aid": self.aid + 'request': 'anime', + 'client': 'adbahttp', + 'clientver': 100, + 'protover': 1, + 'aid': self.aid } - self._xml = self.anidb._get_url("http://api.anidb.net:9001/httpapi", params=params) + self._xml = self.anidb._get_url('http://api.anidb.net:9001/httpapi', params=params) self.fill_from_xml(self._xml) self._loaded = True def fill_from_xml(self, xml): # pylint: disable=too-many-branches - if xml.find("titles") is not None: - self.titles = [Title(self, n) for n in xml.find("titles")] + if xml.find('titles') is not None: + self.titles = [Title(self, n) for n in xml.find('titles')] else: - self.titles = [Title(self, n) for n in xml.findall("title")] + self.titles = [Title(self, n) for n in xml.findall('title')] # return # returning from here will result in not loading attribute information for anime lists like hot_animes - self.synonyms = [t for t in self.titles if t.type == "synonym"] - if xml.find("episodes") is not None: - self.all_episodes = sorted([Episode(self, n) for n in xml.find("episodes")]) + self.synonyms = [t for t in self.titles if t.type == 
'synonym'] + if xml.find('episodes') is not None: + self.all_episodes = sorted([Episode(self, n) for n in xml.find('episodes')]) self.episodes = {e.number: e for e in self.all_episodes if e.type == 1} - if xml.find("picture") is not None: - self.picture = Picture(self, xml.find("picture")) - if xml.find("ratings") is not None: - if xml.find("ratings").find("permanent") is not None: - self.rating_permanent = xml.find("ratings").find("permanent").text - self.count_permanent = xml.find("ratings").find("permanent").get('count', 0) - if xml.find("ratings").find("temporary") is not None: - self.rating_temporary = xml.find("ratings").find("temporary").text - self.count_temporary = xml.find("ratings").find("temporary").get('count', 0) - if xml.find("ratings").find("review") is not None: - self.rating_review = xml.find("ratings").find("review").text - if xml.find("categories") is not None: - self.categories = [Category(self, c) for c in xml.find("categories")] - if xml.find("tags") is not None: - self.tags = sorted([Tag(self, t) for t in xml.find("tags") if t.text.strip()]) - if xml.find("startdate") is not None: - self.start_date = date_to_date(xml.find("startdate").text) - if xml.find("enddate") is not None: - self.end_date = date_to_date(xml.find("enddate").text) - if xml.find("description") is not None: - self.description = xml.find("description").text + if xml.find('picture') is not None: + self.picture = Picture(self, xml.find('picture')) + if xml.find('ratings') is not None: + if xml.find('ratings').find('permanent') is not None: + self.rating_permanent = xml.find('ratings').find('permanent').text + self.count_permanent = xml.find('ratings').find('permanent').get('count', '0') + if xml.find('ratings').find('temporary') is not None: + self.rating_temporary = xml.find('ratings').find('temporary').text + self.count_temporary = xml.find('ratings').find('temporary').get('count', '0') + if xml.find('ratings').find('review') is not None: + self.rating_review = xml.find('ratings').find('review').text + if xml.find('categories') is not None: + self.categories = [Category(self, c) for c in xml.find('categories')] + if xml.find('tags') is not None: + self.tags = sorted([Tag(self, t) for t in xml.find('tags') if t.text.strip()]) + if xml.find('startdate') is not None: + self.start_date = date_to_date(xml.find('startdate').text) + if xml.find('enddate') is not None: + self.end_date = date_to_date(xml.find('enddate').text) + if xml.find('description') is not None: + self.description = xml.find('description').text @property def title(self): - return self.get_title("main") + return self.get_title('main') def get_title(self, title_type=None, lang=None): if not title_type: - title_type = "main" + title_type = 'main' for t in self.titles: if t.type == title_type: return t @@ -124,7 +126,7 @@ def _booleans(self, *attrs): """ for attr in attrs: value = self._xml.attrib.get(attr) - setattr(self, attr, value is not None and value.lower() == "true") + setattr(self, attr, value is not None and value.lower() == 'true') def _texts(self, *attrs): """Set the text values of the given attributes. 
@@ -139,7 +141,7 @@ def __str__(self): return self._xml.text def __repr__(self): - return u"<{0}: {1}>".format( + return '<{0}: {1}>'.format( self.__class__.__name__, unicode(self) ) @@ -175,9 +177,9 @@ class Title(BaseAttribute): # pylint: disable=too-few-public-methods def __init__(self, anime, xml_node): super(Title, self).__init__(anime, xml_node) - # apperently xml:lang is "{http://www.w3.org/XML/1998/namespace}lang" - self.lang = self._xml.attrib["{http://www.w3.org/XML/1998/namespace}lang"] - self.type = self._xml.attrib.get("type") + # apperently xml:lang is '{http://www.w3.org/XML/1998/namespace}lang' + self.lang = self._xml.attrib['{http://www.w3.org/XML/1998/namespace}lang'] + self.type = self._xml.attrib.get('type') class Picture(BaseAttribute): # pylint: disable=too-few-public-methods @@ -187,7 +189,7 @@ def __str__(self): @property def url(self): - return "http://img7.anidb.net/pics/anime/{0}".format(self._xml.text) + return 'http://img7.anidb.net/pics/anime/{0}'.format(self._xml.text) class Episode(BaseAttribute): @@ -198,8 +200,8 @@ def __init__(self, anime, xml_node): self._texts('airdate', 'length', 'epno') self.airdate = date_to_date(self.airdate) - self.titles = [Title(self, n) for n in self._xml.findall("title")] - self.type = int(self._xml.find("epno").attrib["type"]) + self.titles = [Title(self, n) for n in self._xml.findall('title')] + self.type = int(self._xml.find('epno').attrib['type']) self.number = self.epno or 0 if self.type == 1: self.number = int(self.number) @@ -216,7 +218,7 @@ def get_title(self, lang=None): return t def __str__(self): - return u"{0}: {1}".format(self.number, self.title) + return '{0}: {1}'.format(self.number, self.title) def __cmp__(self, other): if self.type > other.type: From 6037a4c8092b4b89a1f3238c9b156ba9fa184f4f Mon Sep 17 00:00:00 2001 From: Dario Date: Sat, 2 Jul 2016 17:18:52 +0200 Subject: [PATCH 123/134] Update tnt logo, add tnt (us) logo, add rai 1 logo (#729) --- gui/slick/images/network/rai 1.png | Bin 0 -> 2407 bytes gui/slick/images/network/tnt (us).png | Bin 0 -> 1631 bytes gui/slick/images/network/tnt.png | Bin 3782 -> 1631 bytes 3 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 gui/slick/images/network/rai 1.png create mode 100644 gui/slick/images/network/tnt (us).png diff --git a/gui/slick/images/network/rai 1.png b/gui/slick/images/network/rai 1.png new file mode 100644 index 0000000000000000000000000000000000000000..4c396c20bb314793266e980d9e2b1d24b1b2381b GIT binary patch literal 2407 zcmaJ@c|4T+9v)-KlCl&cc^gNTm@#IiF_U#>45P-Kh8o*Vv%DD0(#&8~x(tp)4jrYk zw-L7}O47pgiAbcVY|%-Gij!n%yQ7oN{o{7u&*y!A%jbEX@AmxhrqO&nwKa`3ArOc* z#fwB&tZs@IP*+oYPZsP?RxE2I9{v(vL8v5_DFPwx903aiC_H8uNC%mmxZN$F69l5X zf$Qfl@uzOZvjsdja}fiV@`MUD1mff>6*Ae8pafunVO+j5Y_k3q4B&E{VGIn_o+>1Q z;asmhBG7k_j~{zaBpb(pxw-&OQoKTd2TGWLlo!Ppvv-(x9G&aiNaM2JTqVq;_Bu_(Ae6ox?Ja5#j$1H!?y$3@+4sF_aKb6oE{MK;$P7L@jlZ7A}wo#Nh%VKqP+a z8eqlX@;QQ7vGq3&m5QhE#S$i;4N^$XFog=7%jMwFBzr6pg~Fgb+})8#k4=swf;)kX z!J&v)0+HxISmKfd>}Vdymn?BPe{(nel)GpLo=_2)1d6yZAcrgx@PKb+jov6XrJewg ziE|Ia$4xXkNf`*^kkdAFH+4gOmV00st+!yH0M|U(cKk~HK){u!eR9f|g5#*#>G-el zJFLzCG5hDS3CpaA#|BEQWOU-i(D{K}zuAg=`D4X=c}z^IPC<^CR?akd*4;7ty4b05 zrKz8s7U60;vD~n9)-p-{^!`{Dx{#z+c0;wlT-8CN_xB8CK_LP>@o~M9feonAINXqa zR}96$HK4by8_mU4<7&&ys+5Usq^SkxxP!4Y3Q;d(^(p;3xRQ(=7$sw3ant#Rk`L9% zqJ2r(BfbrI?<33RGLX#@@vc&U-ejov{B`UTNg^-75XX!G}I=xd(YU_+Z_UxuQD8akouX zbEb1?Y7tS4#Ivl4Q~T_C^Q>JP=)tK{F1}mOvmwUY9OPON>^z3h>tD39BYLz4XXNi; zU8dGWFVJhdb;+WX%DDuAWqkE26P*KLbM0J2&L8Siu#rdm>I_!S)H*X^g1z*{^wP=_JcmtUt^0J!@t03!-rRfO|1Kd*D 
zP=gC^u0E^MZw_Rfj2W{r{`meA_o*Hm>7L?L_&hUn%}QT`aCO~P@!2MPyUSU9*w%=h zt_*{kEu+LWqo3Sc^H8AD{M%B1R}e~AM$o z)|^u-PsKKN*D*e)0Z(N4FU)Vx|3V~3H(4G?@~T%mpjmw|qhTiR#M-YqUBmPe`5XPF zjDUnKS~p#8nGk0xvhMEJrcAA19Dcv^lt)d9N>5$9EqdSSH726%28Ijpk{Bfa-|LyD zJ_NrWeEA+9SnHT=;g9kpaJ9;qsnHQPJhp{zK1pk4ci?2@&o@E4v@~hCA2YexACrPP zZ&B@&iDsuo8#zFs|xz25txP1gvYZPxA*vDJ0U zJ)__$+v)@ozPqul3VPYLv*Hn+WeWBzsA}cfvSmE4V?)sAYi)itwcPO8$@Gh6IB7!O z;U$kC$B*cU_gfZz;^H1+oQ*Ev+(N>Tdif=6fqKPLySEV@FPs=0Y-g1gaCg@@&)ha`x#sdJzICGuVb=iQNcI4y3k#4P zZ!e$?k9}H}ogd`CVRSGub7JIm^+LuG`H{{2E%7cpz6dr;GM9FK)rhIDubf}qnFn1Vu+6uQ!mf+cOSLZQ-npge480@T{3wviUB4-j;Y zF{g(**q9SR$EJ*(sCYP`igJo4(^GkD2Xl%;aOabg>6E1ixIe~nm;4_0`Fy^w&t<0~ zA(rLt?+yR}OSVj$M6XNf7j&CWf2%4t7txC!B}t)_WEN%Bm0)WdxohpS&k;e7~#MiPAH8Cwd5HojV;t@37Ak26Beym zXQFhZ5p*hQGRX~;5TY}EQ-Z-XDQmQhw~1~TY}1%vl#Mt`ngGh>{|_}7CeaouiFlju ze+pYvc_so*A}r(vvz8uQR)8~4hLO}y^L_AZ(5M7caX2t{#$&tlTik-W} zy!G4sMsxKIIl23~D+jyZ^RYiTlwC1cG5Fn}|NE{5%c0dnjr-Y*jV%fF^Fo+2wyscn zoOuWPEvk`u@lJe&YBbs_aKZMYjP!<|8@*dr)?YdO`ZJcP_FVD)->*>NE$&E2{mAES z4e5uAr=8q-ps>UfMXN+T!0HIMy6K6I$bRXk$p+WHnnJZS#qrU5z9jy^z`m~(*MZ8i z-n6(S5o~Vx4IgAiN9{Ab==uPI^Y=BTjQKV>OiEYVrrPp&%7vctvgGG6=Bmun9;7cu8fm#ckjF}29A zM`G{mZWW}4U%gOvdSkbeRpa;IFt+E?U!7gI2f!3;9>>kt+2fM&3+B>u$XGG3sFfAQy22XLUv zZ&R)~zoG43iq=;YCpO(uE$i0u+RHB2&fmfNHZv|2C6T>7X}P+uOUnhD&vI`Dj>Pxw zKHm9@`?GMhiKdKGwz5k8p>nkVT~D4&)5Aiws9s(;qpkkks`p3|lf0^Xhh1)P^Mz*< wX05T@yjB={v$^w*r|R*rEvk;WXm=mO16V#h8e6IEmpFgdr;GM9FK)rhIDubf}qnFn1Vu+6uQ!mf+cOSLZQ-npge480@T{3wviUB4-j;Y zF{g(**q9SR$EJ*(sCYP`igJo4(^GkD2Xl%;aOabg>6E1ixIe~nm;4_0`Fy^w&t<0~ zA(rLt?+yR}OSVj$M6XNf7j&CWf2%4t7txC!B}t)_WEN%Bm0)WdxohpS&k;e7~#MiPAH8Cwd5HojV;t@37Ak26Beym zXQFhZ5p*hQGRX~;5TY}EQ-Z-XDQmQhw~1~TY}1%vl#Mt`ngGh>{|_}7CeaouiFlju ze+pYvc_so*A}r(vvz8uQR)8~4hLO}y^L_AZ(5M7caX2t{#$&tlTik-W} zy!G4sMsxKIIl23~D+jyZ^RYiTlwC1cG5Fn}|NE{5%c0dnjr-Y*jV%fF^Fo+2wyscn zoOuWPEvk`u@lJe&YBbs_aKZMYjP!<|8@*dr)?YdO`ZJcP_FVD)->*>NE$&E2{mAES z4e5uAr=8q-ps>UfMXN+T!0HIMy6K6I$bRXk$p+WHnnJZS#qrU5z9jy^z`m~(*MZ8i z-n6(S5o~Vx4IgAiN9{Ab==uPI^Y=BTjQKV>OiEYVrrPp&%7vctvgGG6=Bmun9;7cu8fm#ckjF}29A zM`G{mZWW}4U%gOvdSkbeRpa;IFt+E?U!7gI2f!3;9>>kt+2fM&3+B>u$XGG3sFfAQy22XLUv zZ&R)~zoG43iq=;YCpO(uE$i0u+RHB2&fmfNHZv|2C6T>7X}P+uOUnhD&vI`Dj>Pxw zKHm9@`?GMhiKdKGwz5k8p>nkVT~D4&)5Aiws9s(;qpkkks`p3|lf0^Xhh1)P^Mz*< wX05T@yjB={v$^w*r|R*rEvk;WXm=mO16V#h8e6IEmpFgKLZ*U+IBfRsybQWXdwQbLP>6pAqfylh#{fb6;Z(vMMVS~$e@S=j*ftg6;Uhf59&ghTmgWD0l;*T zI709Y^p6lP1rIRMx#05C~cW=H_Aw*bJ-5DT&Z2n+x)QHX^p z00esgV8|mQcmRZ%02D^@S3L16t`O%c004NIvOKvYIYoh62rY33S640`D9%Y2D-rV&neh&#Q1i z007~1e$oCcFS8neI|hJl{-P!B1ZZ9hpmq0)X0i`JwE&>$+E?>%_LC6RbVIkUx0b+_+BaR3cnT7Zv!AJxW zizFb)h!jyGOOZ85F;a?DAXP{m@;!0_IfqH8(HlgRxt7s3}k3K`kFu>>-2Q$QMFfPW!La{h336o>X zu_CMttHv6zR;&ZNiS=X8v3CR#fknUxHUxJ0uoBa_M6WNWeqIg~6QE69c9o#eyhGvpiOA@W-aonk<7r1(?fC{oI5N*U!4 zfg=2N-7=cNnjjOr{yriy6mMFgG#l znCF=fnQv8CDz++o6_Lscl}eQ+l^ZHARH>?_s@|##Rr6KLRFA1%Q+=*RRWnoLsR`7U zt5vFIcfW3@?wFpwUVxrVZ>QdQz32KIeJ}k~{cZZE^+ya? 
z2D1z#2HOnI7(B%_ac?{wFUQ;QQA1tBKtrWrm0_3Rgps+?Jfqb{jYbcQX~taRB;#$y zZN{S}1|}gUOHJxc?wV3fxuz+mJ4`!F$IZ;mqRrNsHJd##*D~ju=bP7?-?v~|cv>vB zsJ6IeNwVZxrdjT`yl#bBIa#GxRa#xMMy;K#CDyyGyQdMSxlWT#tDe?p!?5wT$+oGt z8L;Kp2HUQ-ZMJ=3XJQv;x5ci*?vuTfeY$;({XGW_huIFR9a(?@3)XSs8O^N5RyOM=TTmp(3=8^+zpz2r)C z^>JO{deZfso3oq3?Wo(Y?l$ge?uXo;%ru`Vo>?<<(8I_>;8Eq#KMS9gFl*neeosSB zfoHYnBQIkwkyowPu(zdms`p{<7e4kra-ZWq<2*OsGTvEV%s0Td$hXT+!*8Bnh2KMe zBmZRodjHV?r+_5^X9J0WL4jKW`}lf%A-|44I@@LTvf1rHjG(ze6+w@Jt%Bvjts!X0 z?2xS?_ve_-kiKB_KiJlZ$9G`c^=E@oNG)mWWaNo-3TIW8)$Hg0Ub-~8?KhvJ>$ z3*&nim@mj(aCxE5!t{lw7O5^0EIO7zOo&c6l<+|iDySBWCGrz@C5{St!X3hAA}`T4 z(TLbXTq+(;@<=L8dXnssyft|w#WSTW<++3>sgS%(4NTpeI-VAqb|7ssJvzNHgOZVu zaYCvgO_R1~>SyL=cFU|~g|hy|Zi}}s9+d~lYqOB71z9Z$wnC=pR9Yz4DhIM>Wmjgu z&56o6maCpC&F##y%G;1PobR9i?GnNg;gYtchD%p19a!eQtZF&3JaKv33gZ<8D~47E ztUS1iwkmDaPpj=$m#%)jCVEY4fnLGNg2A-`YwHVD3gv};>)hAvT~AmqS>Lr``i7kw zJ{5_It`yrBmlc25DBO7E8;5VoznR>Ww5hAaxn$2~(q`%A-YuS64wkBy=9dm`4cXeX z4c}I@?e+FW+b@^RDBHV(wnMq2zdX3SWv9u`%{xC-q*U}&`cyXV(%rRT*Z6MH?i+i& z_B8C(+grT%{XWUQ+f@NoP1R=AW&26{v-dx)iK^-Nmiuj8txj!m?Z*Ss1N{dh4z}01 z)YTo*JycSU)+_5r4#yw9{+;i4Ee$peRgIj+;v;ZGdF1K$3E%e~4LaI(jC-u%2h$&R z9cLXcYC@Xwnns&bn)_Q~Te?roKGD|d-g^8;+aC{{G(1^(O7m37Y1-+6)01cN&y1aw zoqc{T`P^XJqPBbIW6s}d4{z_f5Om?vMgNQEJG?v2T=KYd^0M3I6IZxbny)%vZR&LD zJpPl@Psh8QyPB@KTx+@RdcC!KX7}kEo;S|j^u2lU7XQ}Oo;f|;z4Ll+_r>@1-xl3| zawq-H%e&ckC+@AhPrP6BKT#_XdT7&;F71j}Joy zkC~6lh7E@6o;W@^IpRNZ{ptLtL(gQ-CY~4mqW;US7Zxvm_|@yz&e53Bp_lTPlfP|z zrTyx_>lv@x#=^!PzR7qqF<$gm`|ZJZ+;<)Cqu&ot2z=0000WV@Og>004R=004l4008;_004mL004C`008P>0026e000+nl3&F} z000B=Nkl7zgmrT-H|Fx{wPks@Y}caNWsZ4ZBd1rBP7$N740*_KcCb|CD^MjYerknd6f2Riy@8z8H zyyyMC@B2L8=Q&ja0|Uk)y>+c4eRL8o!$mj~Q*e0P+lg*$!Y5dT-$$afzOQpk-(PhQ z0GNS$aRX{csP=Dk;K=~xe+fVp7GpuCe*j-$OR~zxVQPStmB(>^UH}{-A2|(g##S%M zhj;<2v2~dDPR6CU87rCJ$X$hKn%=-$#E_ z=xi=J04HHBYNGvYeEiA-(Dg2+1t4$)KE;Ve2A~t;qy7l)EQ8M5(H`}h7^9E?JQX_t zmg3Ry=agj2AjT#C!(1scMt!%sLpC*!&~yhRC!uCaQ7a_xXcN&QCoNvgZOC`aI^ zAUW^>*5?So=ZOuNkGdiNaBWgwEPr_-sm~p&vT_iiSyAr7dpHs^u_stxNe&Ia-j8#z z4d2AQhOir7#B)vfC&MboN1U)SbkyRKAXgOypef4FV$#>+? 
zK)-H3E@`L_eiiS;b=D{+d32IU>93d@jI-Ah&1s{2eQE?eJ0c%28aX4%p8CGdfrYV2#0iUrE+S-@0hO zgcqZ4L-JgA^sUI~oSoQ~RYeGfCWciWhmRtek*9oYFtPeC>gxMCcUF1_?1&W>+k)SsJt?U1^dWS$XS_^V4113jhEB07*qoM6N<$f_eoI*Z=?k From f35d6da5dd251b60f832a7ea2030ab59faf2b5ff Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 3 Jul 2016 15:06:38 +0200 Subject: [PATCH 124/134] Added TorrentShack torrent provider (#718) --- sickbeard/providers/__init__.py | 4 +- sickbeard/providers/torrentshack.py | 193 ++++++++++++++++++++++++++++ 2 files changed, 195 insertions(+), 2 deletions(-) create mode 100644 sickbeard/providers/torrentshack.py diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 4f9fbfc260..18dc1df4a1 100644 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -25,7 +25,7 @@ omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, xthor, abnormal, torrentbytes, cpasbien,\ freshontv, morethantv, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \ scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free, limetorrents, \ - norbits, ilovetorrents, anizb, bithdtv, zooqle, animebytes + norbits, ilovetorrents, anizb, bithdtv, zooqle, animebytes, torrentshack __all__ = [ 'womble', 'btn', 'thepiratebay', 'kat', 'torrentleech', 'scc', 'hdtorrents', @@ -36,7 +36,7 @@ 'xthor', 'abnormal', 'scenetime', 'btdigg', 'transmitthenet', 'tvchaosuk', 'torrentproject', 'extratorrent', 'bitcannon', 'torrentz', 'pretome', 'gftracker', 'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free', 'limetorrents', - 'norbits', 'ilovetorrents', 'anizb', 'bithdtv', 'zooqle', 'animebytes' + 'norbits', 'ilovetorrents', 'anizb', 'bithdtv', 'zooqle', 'animebytes', 'torrentshack' ] diff --git a/sickbeard/providers/torrentshack.py b/sickbeard/providers/torrentshack.py new file mode 100644 index 0000000000..bc43e84e4c --- /dev/null +++ b/sickbeard/providers/torrentshack.py @@ -0,0 +1,193 @@ +# coding=utf-8 +# Author: medariox (dariox@gmx.com) +# +# This file is part of Medusa. +# +# Medusa is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Medusa is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Medusa. If not, see . 
+
+from __future__ import unicode_literals
+
+import re
+import traceback
+
+from requests.compat import urljoin
+from requests.utils import dict_from_cookiejar
+
+from sickbeard import logger, tvcache
+from sickbeard.bs4_parser import BS4Parser
+
+from sickrage.helper.common import convert_size, try_int
+from sickrage.providers.torrent.TorrentProvider import TorrentProvider
+
+
+class TorrentShackProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
+    """TorrentShack Torrent provider"""
+    def __init__(self):
+
+        # Provider Init
+        TorrentProvider.__init__(self, 'TorrentShack')
+
+        # Credentials
+        self.username = None
+        self.password = None
+
+        # Torrent Stats
+        self.minseed = 0
+        self.minleech = 0
+
+        # URLs
+        self.url = 'https://www.torrentshack.me'
+        self.urls = {
+            'login': urljoin(self.url, 'login.php'),
+            'search': urljoin(self.url, 'torrents.php'),
+        }
+
+        # Proper Strings
+        self.proper_strings = ['PROPER', 'REPACK', 'REAL']
+
+        # Cache
+        self.cache = tvcache.TVCache(self, min_time=20)  # Only poll TorrentShack every 20 minutes max
+
+    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches
+        """
+        TorrentShack search and parsing
+
+        :param search_string: A dict with mode (key) and the search value (value)
+        :param age: Not used
+        :param ep_obj: Not used
+        :returns: A list of search results (structure)
+        """
+        results = []
+        if not self.login():
+            return results
+
+        # Search Params
+        search_params = {
+            'searchstr': '',
+            'release_type': 'both',
+            'searchtags': '',
+            'tags_type': 0,
+            'order_by': 's3',
+            'order_way': 'desc',
+            'torrent_preset': 'all',
+            'filter_cat[600]': 1,
+            'filter_cat[960]': 1,
+            'filter_cat[620]': 1,
+            'filter_cat[320]': 1,
+            'filter_cat[700]': 1,
+            'filter_cat[970]': 1,
+            'filter_cat[981]': 1,
+            'filter_cat[850]': 1,
+            'filter_cat[980]': 1,
+        }
+
+        # Units
+        units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
+
+        for mode in search_strings:
+            items = []
+            logger.log('Search mode: {0}'.format(mode), logger.DEBUG)
+
+            for search_string in search_strings[mode]:
+
+                if mode != 'RSS':
+                    search_params['searchstr'] = search_string
+                    logger.log('Search string: {search}'.format
+                               (search=search_string), logger.DEBUG)
+
+                response = self.get_url(self.urls['search'], params=search_params, returns='response')
+                if not response or not response.text:
+                    logger.log('No data returned from provider', logger.DEBUG)
+                    continue
+
+                with BS4Parser(response.text, 'html5lib') as html:
+                    torrent_rows = html.find_all('tr', class_='torrent')
+                    if not torrent_rows:
+                        logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
+                        continue
+
+                    for result in torrent_rows:
+                        try:
+                            cells = result('td')
+
+                            title = cells[1].find('span', class_='torrent_name_link').get_text()
+                            download_url = cells[1].find('span', class_='torrent_handle_links')
+                            download_url = download_url.find('a').find_next('a').get('href')
+                            download_url = urljoin(self.url, download_url)
+                            if not all([title, download_url]):
+                                continue
+
+                            seeders = try_int(cells[6].get_text())
+                            leechers = try_int(cells[7].get_text())
+
+                            # Filter unseeded torrent
+                            if seeders < min(self.minseed, 1):
+                                if mode != 'RSS':
+                                    logger.log("Discarding torrent because it doesn't meet the "
+                                               "minimum seeders: {0}. Seeders: {1}".format
+                                               (title, seeders), logger.DEBUG)
+                                continue
+
+                            torrent_size = cells[4].get_text()
+                            torrent_size = re.search('\d+.\d+.\w+', torrent_size).group(0)
+                            size = convert_size(torrent_size, units=units) or -1
+
+                            item = {
+                                'title': title,
+                                'link': download_url,
+                                'size': size,
+                                'seeders': seeders,
+                                'leechers': leechers,
+                                'pubdate': None,
+                                'hash': None,
+                            }
+                            if mode != 'RSS':
+                                logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
+                                           (title, seeders, leechers), logger.DEBUG)
+
+                            items.append(item)
+                        except (AttributeError, TypeError, KeyError, ValueError, IndexError):
+                            logger.log('Failed parsing provider. Traceback: {0!r}'.format
+                                       (traceback.format_exc()), logger.ERROR)
+                            continue
+
+            results += items
+
+        return results
+
+    def login(self):
+        """Login method used for logging in before doing search and torrent downloads."""
+        if any(dict_from_cookiejar(self.session.cookies).values()):
+            return True
+
+        login_params = {
+            'username': self.username,
+            'password': self.password,
+            'keeplogged': '1',
+            'login': 'Login',
+        }
+
+        response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
+        if not response:
+            logger.log('Unable to connect to provider', logger.WARNING)
+            return False
+
+        if 'Login :: TorrentShack.me' in response:
+            logger.log('Invalid username or password. Check your settings', logger.WARNING)
+            return False
+
+        return True
+
+
+provider = TorrentShackProvider()

From 5c2fb31893df15e3b7d6ec038fe1a99c3362bb4b Mon Sep 17 00:00:00 2001
From: p0ps
Date: Tue, 5 Jul 2016 15:24:40 +0200
Subject: [PATCH 125/134] Added cookies_ingest attribute to torrent providers.
 Should fix the TD captcha. (#727)

* Added cookies_ingest attribute to torrent providers. Users can now
  configure cookies for providers that set the cookies_ingest attribute.
  For now it is only enabled for TorrentDay, to work around its captcha.

* Fixed always exiting with False.

* When a wrong cookie was entered and Medusa was restarted, the old
  cookies kept being used. Fixed this by invalidating the cookies when
  the provider returns HTML instead of JSON.

* Fixed passing too many parameters to format.

* Fixed Codacy error: don't materialize the inner list.

* Reworked the rss_torrent cookie handling. There is now a single
  provider cookies attribute. Because RSS providers are added and then
  checked for a result, some changes were needed: cookies can be
  configured from the "configure custom torrent provider" and
  "provider options" tabs. Also introduced a new provider.enable_cookies
  attribute next to provider.cookies, as we need to get rid of the
  hasattr() calls in the mako templates at some point anyway. New
  providers that need the cookies functionality only have to set
  provider.enable_cookies = True.

* Removed an import that was no longer needed.

* Replaced the rss_validate function's tuple return values with dicts.

* Added UI notifications for providers that need cookies, like
  TorrentDay. The user now gets a message telling them to check the
  cookies for that provider.

* Added a default return value to add_cookies_from_ui() for when
  enable_cookies is not set up correctly for a provider.

* Made sure the cookies validation message is also sent back for
  rsstorrent.py.
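Illustrative sketch (not part of this patch set): a minimal model of the cookie handling described above. The class name, field names, and the exact shape of the returned dict are assumptions based on this commit message, not the actual Medusa implementation.

import requests
from requests.utils import add_dict_to_cookiejar


class CookieProviderSketch(object):
    """Hypothetical provider showing UI-configured cookies (sketch only)."""

    def __init__(self, session=None):
        self.enable_cookies = True  # opt-in flag, as described above
        self.cookies = ''           # raw "key=value;key2=value2" string from the UI
        self.session = session or requests.Session()

    def add_cookies_from_ui(self):
        """Validate the configured cookie string and load it into the session.

        Returns a dict rather than a tuple, mirroring the rework described
        above; the keys 'result' and 'message' are assumed names.
        """
        if not self.enable_cookies:
            # Default result for providers that never opted in to cookies.
            return {'result': False, 'message': 'Provider does not support cookies'}

        if not self.cookies:
            return {'result': False, 'message': 'No cookies configured for this provider'}

        # Expect "name=value" pairs separated by semicolons.
        pairs = [pair.strip() for pair in self.cookies.split(';') if pair.strip()]
        if not all('=' in pair for pair in pairs):
            return {'result': False,
                    'message': 'Cookies must be key=value pairs separated by semicolons'}

        add_dict_to_cookiejar(self.session.cookies,
                              dict(pair.split('=', 1) for pair in pairs))
        return {'result': True, 'message': ''}


# Example: a TorrentDay-style cookie string entered in the provider options tab.
sketch = CookieProviderSketch()
sketch.cookies = 'uid=12345; pass=abcdef'
print(sketch.add_cookies_from_ui())  # {'result': True, 'message': ''}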
---
 gui/slick/js/configProviders.js               |  2 +-
 gui/slick/views/config_providers.mako         | 21 ++++++++-
 sickbeard/__init__.py                         |  6 +++
 sickbeard/providers/rsstorrent.py             | 36 ++++++++--------
 sickbeard/providers/torrentday.py             | 43 ++++++++-----------
 sickbeard/server/web/config/providers.py      | 12 ++++--
 sickrage/providers/GenericProvider.py         | 38 +++++++++++++++-
 sickrage/providers/torrent/TorrentProvider.py |  3 +-
 8 files changed, 110 insertions(+), 51 deletions(-)

diff --git a/gui/slick/js/configProviders.js b/gui/slick/js/configProviders.js
index 0ed7315459..61a96898a1 100644
--- a/gui/slick/js/configProviders.js
+++ b/gui/slick/js/configProviders.js
@@ -283,7 +283,7 @@ $(document).ready(function(){
         } else {
             $('#torrentrss_name').attr("disabled", "disabled");
             $('#torrentrss_url').removeAttr("disabled");
-            $('#torrentrss_cookies').removeAttr("disabled");
+            $('#torrentrss_cookies').attr("disabled", "disabled");
             $('#torrentrss_titleTAG').removeAttr("disabled");
             $('#torrentrss_delete').removeAttr("disabled");
         }
diff --git a/gui/slick/views/config_providers.mako b/gui/slick/views/config_providers.mako
index 3c4c5d0688..89fbd26e68 100644
--- a/gui/slick/views/config_providers.mako
+++ b/gui/slick/views/config_providers.mako
@@ -408,6 +408,23 @@ $('#config-components').tabs();
                 % endif
+                % if curTorrentProvider.enable_cookies:
+
+
+
+
+                % endif
                 % if hasattr(curTorrentProvider, 'passkey'):