From a22a05aec63930a2f500359ae703f6631e1d7819 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 17 Feb 2017 21:48:57 +0100 Subject: [PATCH 001/344] Switched queue to unicode_literals. Had to set some bytes on sqlite results. --- medusa/search/queue.py | 52 ++++++++++++++++++++++-------------------- 1 file changed, 27 insertions(+), 25 deletions(-) diff --git a/medusa/search/queue.py b/medusa/search/queue.py index 4f57abe5a8..143f325561 100644 --- a/medusa/search/queue.py +++ b/medusa/search/queue.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . +"""Module with different types of Queue Items for searching and snatching.""" from __future__ import unicode_literals @@ -96,7 +97,7 @@ def add_item(self, item): and not self.is_in_queue(item.show, item.segment): generic_queue.GenericQueue.add_item(self, item) else: - logger.log(u"Not adding item, it's already in the queue", logger.DEBUG) + logger.log("Not adding item, it's already in the queue", logger.DEBUG) def force_daily(self): if not self.is_dailysearch_in_progress and not self.currentItem.amActive: @@ -184,7 +185,7 @@ def add_item(self, item): # manual, snatch and failed searches generic_queue.GenericQueue.add_item(self, item) else: - logger.log(u"Not adding item, it's already in the queue", logger.DEBUG) + logger.log("Not adding item, it's already in the queue", logger.DEBUG) class SnatchQueue(generic_queue.GenericQueue): @@ -231,7 +232,7 @@ def add_item(self, item): # backlog searches generic_queue.GenericQueue.add_item(self, item) else: - logger.log(u"Not adding item, it's already in the queue", logger.DEBUG) + logger.log("Not adding item, it's already in the queue", logger.DEBUG) class DailySearchQueueItem(generic_queue.QueueItem): @@ -249,20 +250,20 @@ def run(self): self.started = True try: - logger.log(u"Beginning daily search for new episodes") + logger.log("Beginning daily search for new episodes") found_results = search_for_needed_episodes() if not found_results: - logger.log(u"No needed episodes found") + logger.log("No needed episodes found") else: for result in found_results: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log(u"Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format + logger.log("Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format (result.name, result.seeders, result.leechers, pretty_file_size(result.size), result.provider.name)) else: - logger.log(u"Downloading {0} with size: {1} from {2}".format + logger.log("Downloading {0} with size: {1} from {2}".format (result.name, pretty_file_size(result.size), result.provider.name)) self.success = snatch_episode(result) @@ -313,7 +314,7 @@ def run(self): self.started = True try: - logger.log(u'Beginning {0} {1}search for: [{2}]'. + logger.log('Beginning {0} {1}search for: [{2}]'. 
format(('forced', 'manual')[bool(self.manual_search)], ('', 'season pack ')[bool(self.manual_search_type == 'season')], self.segment[0].pretty_name())) @@ -325,11 +326,11 @@ def run(self): for result in search_result: # Just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log(u'Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}'.format + logger.log('Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}'.format (result.name, result.seeders, result.leechers, pretty_file_size(result.size), result.provider.name)) else: - logger.log(u'Downloading {0} with size: {1} from {2}'.format + logger.log('Downloading {0} with size: {1} from {2}'.format (result.name, pretty_file_size(result.size), result.provider.name)) self.success = snatch_episode(result) @@ -350,7 +351,7 @@ def run(self): else: ui.notifications.message('No results were found') - logger.log(u'Unable to find {0} {1}results for: [{2}]'. + logger.log('Unable to find {0} {1}results for: [{2}]'. format(('forced', 'manual')[bool(self.manual_search)], ('', 'season pack ')[bool(self.manual_search_type == 'season')], self.segment[0].pretty_name())) @@ -409,23 +410,24 @@ def run(self): search_result.leechers = int(self.cached_result[b'leechers']) search_result.release_group = self.cached_result[b'release_group'] search_result.version = int(self.cached_result[b'version']) - search_result.proper_tags = self.cached_result[b'proper_tags'].split('|') if self.cached_result[b'proper_tags'] else '' + search_result.proper_tags = self.cached_result[b'proper_tags'].split('|') \ + if self.cached_result[b'proper_tags'] else '' search_result.manually_searched = True try: - logger.log(u"Beginning to manual snatch release: {0}".format(search_result.name)) + logger.log("Beginning to manual snatch release: {0}".format(search_result.name)) if search_result: if search_result.seeders not in (-1, None) and search_result.leechers not in (-1, None): - logger.log(u"Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format + logger.log("Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format (search_result.name, search_result.seeders, search_result.leechers, pretty_file_size(search_result.size), search_result.provider.name)) else: - logger.log(u"Downloading {0} with size: {1} from {2}".format + logger.log("Downloading {0} with size: {1} from {2}".format (search_result.name, pretty_file_size(search_result.size), search_result.provider.name)) self.success = snatch_episode(search_result) else: - logger.log(u"Unable to snatch release: {0}".format(search_result.name)) + logger.log("Unable to snatch release: {0}".format(search_result.name)) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) @@ -463,25 +465,25 @@ def run(self): if not self.show.paused: try: - logger.log(u"Beginning backlog search for: [" + self.show.name + "]") + logger.log("Beginning backlog search for: [" + self.show.name + "]") search_result = search_providers(self.show, self.segment) if search_result: for result in search_result: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log(u"Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format + logger.log("Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format (result.name, result.seeders, result.leechers, pretty_file_size(result.size), result.provider.name)) else: - 
logger.log(u"Downloading {0} with size: {1} from {2}".format + logger.log("Downloading {0} with size: {1} from {2}".format (result.name, pretty_file_size(result.size), result.provider.name)) self.success = snatch_episode(result) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) else: - logger.log(u"No needed episodes found during backlog search for: [" + self.show.name + "]") + logger.log("No needed episodes found during backlog search for: [" + self.show.name + "]") except Exception: self.success = False @@ -516,7 +518,7 @@ def run(self): try: for ep_obj in self.segment: - logger.log(u"Marking episode as bad: [" + ep_obj.pretty_name() + "]") + logger.log("Marking episode as bad: [" + ep_obj.pretty_name() + "]") failed_history.mark_failed(ep_obj) @@ -526,7 +528,7 @@ def run(self): history.log_failed(ep_obj, release, provider) failed_history.revert_episode(ep_obj) - logger.log(u"Beginning failed download search for: [" + ep_obj.pretty_name() + "]") + logger.log("Beginning failed download search for: [" + ep_obj.pretty_name() + "]") # If it is wanted, self.down_cur_quality doesnt matter # if it isnt wanted, we need to make sure to not overwrite the existing ep that we reverted to! @@ -536,18 +538,18 @@ def run(self): for result in search_result: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log(u"Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format + logger.log("Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format (result.name, result.seeders, result.leechers, pretty_file_size(result.size), result.provider.name)) else: - logger.log(u"Downloading {0} with size: {1} from {2}".format + logger.log("Downloading {0} with size: {1} from {2}".format (result.name, pretty_file_size(result.size), result.provider.name)) self.success = snatch_episode(result) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) else: - logger.log(u"No needed episodes found during failed search for: [" + self.show.name + "]") + logger.log("No needed episodes found during failed search for: [" + self.show.name + "]") except Exception: self.success = False From 416a921272480e5d8f979242689e66a996bd2be0 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Fri, 17 Feb 2017 21:50:21 +0100 Subject: [PATCH 002/344] Added new style logger for Series.py. 
--- medusa/tv/series.py | 387 +++++++++++++++++++++----------------------- 1 file changed, 183 insertions(+), 204 deletions(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index c02a0841ef..3ab8e1f832 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -4,6 +4,7 @@ import copy import datetime import glob +import logging import os.path import shutil import stat @@ -22,7 +23,6 @@ db, helpers, image_cache, - logger, network_timezones, notifiers, post_processor, @@ -98,6 +98,8 @@ MILLIS_YEAR_1900 = datetime.datetime(year=1900, month=1, day=1).toordinal() +logger = logging.getLogger(__name__) + class Series(TV): """Represent a TV Show.""" @@ -172,10 +174,7 @@ def create_indexer(self, banners=False, actors=False, dvd_order=False, episodes= if self.lang: params[b'language'] = self.lang - logger.log( - u'{id}: Using language from show settings: {lang}'.format - (id=self.indexerid, lang=self.lang), logger.DEBUG - ) + logger.debug(u'{id}: Using language from show settings: {lang}', id=self.indexerid, lang=self.lang) if self.dvd_order != 0 or dvd_order: params[b'dvdorder'] = True @@ -253,13 +252,11 @@ def indexer_slug(self): @location.setter def location(self, value): - logger.log( - u'{indexer} {id}: Setting location: {location}'.format( - indexer=indexerApi(self.indexer).name, - id=self.indexerid, - location=value - ), - logger.DEBUG + logger.debug( + u'{indexer} {id}: Setting location: {location}', + indexer=indexerApi(self.indexer).name, + id=self.indexerid, + location=value ) # Don't validate dir if user wants to add shows without creating a dir if app.ADD_SHOWS_WO_DIR or self.is_location_valid(value): @@ -437,28 +434,26 @@ def get_episode(self, season=None, episode=None, filepath=None, no_create=False, b'FROM tv_episodes ' \ b'WHERE showid = ? AND absolute_number = ? AND season != 0' sql_args = [self.indexerid, absolute_number] - logger.log(u'{id}: Season and episode lookup for {show} using absolute number {absolute}'. - format(id=self.indexerid, absolute=absolute_number, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Season and episode lookup for {show} using absolute number {absolute}', + id=self.indexerid, absolute=absolute_number, show=self.name) elif air_date: sql = b'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?' sql_args = [self.indexerid, air_date.toordinal()] - logger.log(u'{id}: Season and episode lookup for {show} using air date {air_date}'. 
- format(id=self.indexerid, air_date=air_date, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Season and episode lookup for {show} using air date {air_date}', + id=self.indexerid, air_date=air_date, show=self.name) sql_results = main_db_con.select(sql, sql_args) if sql else [] if len(sql_results) == 1: episode = int(sql_results[0][b'episode']) season = int(sql_results[0][b'season']) - logger.log(u'{id}: Found season and episode which is {show} {ep}'.format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode)), logger.DEBUG) + logger.debug(u'{id}: Found season and episode which is {show} {ep}', + id=self.indexerid, show=self.name, ep=episode_num(season, episode)) elif len(sql_results) > 1: - logger.log(u'{id}: Multiple entries found in show: {show} '.format - (id=self.indexerid, show=self.name), logger.ERROR) + logger.error(u'{id}: Multiple entries found in show: {show} ', id=self.indexerid, show=self.name) return None else: - logger.log(u'{id}: No entries found in show: {show}'.format - (id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: No entries found in show: {show}', id=self.indexerid, show=self.name) return None if season not in self.episodes: @@ -489,8 +484,8 @@ def should_update(self, update_date=datetime.date.today()): """ # if show is 'paused' do not update_date if self.paused: - logger.log(u'{id}: Show {show} is paused. Update skipped'.format - (id=self.indexerid, show=self.name), logger.INFO) + logger.info(u'{id}: Show {show} is paused. Update skipped', + id=self.indexerid, show=self.name) return False # if show is not 'Ended' always update (status 'Continuing') @@ -588,8 +583,7 @@ def write_metadata(self, show_only=False): :type show_only: bool """ if not self.is_location_valid(): - logger.log(u"{id}: Show dir doesn't exist, skipping NFO generation".format(id=self.indexerid), - logger.WARNING) + logger.warning(u"{id}: Show dir doesn't exist, skipping NFO generation", id=self.indexerid) return for metadata_provider in app.metadata_provider_dict.values(): @@ -601,7 +595,7 @@ def write_metadata(self, show_only=False): def __write_episode_nfos(self): - logger.log(u"{id}: Writing NFOs for all episodes".format(id=self.indexerid), logger.DEBUG) + logger.debug(u"{id}: Writing NFOs for all episodes", id=self.indexerid) main_db_con = db.DBConnection() sql_results = main_db_con.select( @@ -615,9 +609,8 @@ def __write_episode_nfos(self): b" AND location != ''", [self.indexerid]) for ep_result in sql_results: - logger.log(u'{id}: Retrieving/creating episode {ep}'.format - (id=self.indexerid, ep=episode_num(ep_result[b'season'], ep_result[b'episode'])), - logger.DEBUG) + logger.debug(u'{id}: Retrieving/creating episode {ep}', + id=self.indexerid, ep=episode_num(ep_result[b'season'], ep_result[b'episode'])) cur_ep = self.get_episode(ep_result[b'season'], ep_result[b'episode']) if not cur_ep: continue @@ -627,8 +620,7 @@ def __write_episode_nfos(self): def update_metadata(self): """Update show metadata files.""" if not self.is_location_valid(): - logger.log(u"{id}: Show dir doesn't exist, skipping NFO generation".format(id=self.indexerid), - logger.WARNING) + logger.warning(u"{id}: Show dir doesn't exist, skipping NFO generation", id=self.indexerid) return self.__update_show_nfo() @@ -637,7 +629,7 @@ def __update_show_nfo(self): result = False - logger.log(u"{id}: Updating NFOs for show with new indexer info".format(id=self.indexerid), logger.INFO) + logger.info(u"{id}: Updating NFOs for show with new indexer info", id=self.indexerid) # You may 
only call .values() on metadata_provider_dict! As on values() call the indexer_api attribute # is reset. This will prevent errors, when using multiple indexers and caching. for cur_provider in app.metadata_provider_dict.values(): @@ -648,34 +640,31 @@ def __update_show_nfo(self): def load_episodes_from_dir(self): """Find all media files in the show folder and create episodes for as many as possible.""" if not self.is_location_valid(): - logger.log(u"{id}: Show dir doesn't exist, not loading episodes from disk".format(id=self.indexerid), - logger.WARNING) + logger.warning(u"{id}: Show dir doesn't exist, not loading episodes from disk", id=self.indexerid) return - logger.log(u"{id}: Loading all episodes from the show directory: {location}".format - (id=self.indexerid, location=self.location), logger.DEBUG) + logger.debug(u"{id}: Loading all episodes from the show directory: {location}", + id=self.indexerid, location=self.location) # get file list media_files = helpers.list_media_files(self.location) - logger.log(u'{id}: Found files: {media_files}'.format - (id=self.indexerid, media_files=media_files), logger.DEBUG) + logger.debug(u'{id}: Found files: {media_files}', id=self.indexerid, media_files=media_files) # create TVEpisodes from each media file (if possible) sql_l = [] for media_file in media_files: cur_episode = None - logger.log(u"{id}: Creating episode from: {location}".format - (id=self.indexerid, location=media_file), logger.DEBUG) + logger.debug(u"{id}: Creating episode from: {location}", id=self.indexerid, location=media_file) try: cur_episode = self.make_ep_from_file(os.path.join(self.location, media_file)) except (ShowNotFoundException, EpisodeNotFoundException) as e: - logger.log(u"{id}: Episode {location} returned an exception {error_msg}".format - (id=self.indexerid, location=media_file, error_msg=ex(e)), logger.WARNING) + logger.warning(u"{id}: Episode {location} returned an exception {error_msg}", + id=self.indexerid, location=media_file, error_msg=ex(e)) continue except EpisodeDeletedException: - logger.log(u'{id}: The episode deleted itself when I tried making an object for it'.format - (id=self.indexerid), logger.DEBUG) + logger.debug(u'{id}: The episode deleted itself when I tried making an object for it', + id=self.indexerid) if cur_episode is None: continue @@ -690,8 +679,8 @@ def load_episodes_from_dir(self): parse_result = None if ' ' not in ep_file_name and parse_result and parse_result.release_group: - logger.log(u'{id}: Filename {file_name} gave release group of {rg}, seems valid'.format - (id=self.indexerid, file_name=ep_file_name, rg=parse_result.release_group), logger.DEBUG) + logger.debug(u'{id}: Filename {file_name} gave release group of {rg}, seems valid', + id=self.indexerid, file_name=ep_file_name, rg=parse_result.release_group) cur_episode.release_name = ep_file_name # store the reference in the show @@ -700,8 +689,8 @@ def load_episodes_from_dir(self): try: cur_episode.refresh_subtitles() except Exception: - logger.log(u'{id}: Could not refresh subtitles'.format(id=self.indexerid), logger.ERROR) - logger.log(traceback.format_exc(), logger.DEBUG) + logger.info(u'{id}: Could not refresh subtitles', id=self.indexerid) + logger.debug(traceback.format_exc()) sql_l.append(cur_episode.get_sql()) @@ -731,15 +720,14 @@ def load_episodes_from_db(self, seasons=None): if seasons: sql += b' AND season IN (%s)' % ','.join('?' 
* len(seasons)) sql_results = main_db_con.select(sql, [self.indexerid] + seasons) - logger.log(u'{id}: Loading all episodes of season(s) {seasons} from the DB'.format - (id=self.indexerid, seasons=seasons), logger.DEBUG) + logger.debug(u'{id}: Loading all episodes of season(s) {seasons} from the DB', + id=self.indexerid, seasons=seasons) else: sql_results = main_db_con.select(sql, [self.indexerid]) - logger.log(u'{id}: Loading all episodes of all seasons from the DB'.format - (id=self.indexerid), logger.DEBUG) + logger.debug(u'{id}: Loading all episodes of all seasons from the DB', id=self.indexerid) except Exception as error: - logger.log(u'{id}: Could not load episodes from the DB. Error: {error_msg}'.format - (id=self.indexerid, error_msg=error), logger.ERROR) + logger.error(u'{id}: Could not load episodes from the DB. Error: {error_msg}', + id=self.indexerid, error_msg=error) return scanned_eps cached_show = self.indexer_api[self.indexerid] @@ -757,28 +745,26 @@ def load_episodes_from_db(self, seasons=None): delete_ep = False - logger.log(u'{id}: Loading {show} {ep} from the DB'.format - (id=cur_show_id, show=cur_show_name, ep=episode_num(cur_season, cur_episode)), - logger.DEBUG) + logger.debug(u'{id}: Loading {show} {ep} from the DB', + id=cur_show_id, show=cur_show_name, ep=episode_num(cur_season, cur_episode)) if cur_season not in cached_seasons: try: cached_seasons[cur_season] = cached_show[cur_season] except IndexerSeasonNotFound as error: - logger.log(u'{id}: {error_msg} (unaired/deleted) in the indexer {indexer} for {show}. ' - u'Removing existing records from database'.format - (id=cur_show_id, error_msg=error.message, indexer=indexerApi(self.indexer).name, - show=cur_show_name), logger.DEBUG) + logger.debug(u'{id}: {error_msg} (unaired/deleted) in the indexer {indexer} for {show}. ' + u'Removing existing records from database', + id=cur_show_id, error_msg=error.message, indexer=indexerApi(self.indexer).name, + show=cur_show_name) delete_ep = True if cur_season not in scanned_eps: scanned_eps[cur_season] = {} if cur_episode == 0: - logger.log(u'{id}: Tried loading {show} {ep} from the DB. With an episode id set to 0.' - u' We dont support that. Skipping to next episode.'. - format(id=cur_show_id, show=cur_show_name, - ep=episode_num(cur_season, cur_episode)), logger.WARNING) + logger.warning(u'{id}: Tried loading {show} {ep} from the DB. With an episode id set to 0.' + u' We dont support that. 
Skipping to next episode.', + id=cur_show_id, show=cur_show_name, ep=episode_num(cur_season, cur_episode)) continue try: @@ -793,13 +779,12 @@ def load_episodes_from_db(self, seasons=None): cur_ep.load_from_db(cur_season, cur_episode) scanned_eps[cur_season][cur_episode] = True except EpisodeDeletedException: - logger.log(u'{id}: Tried loading {show} {ep} from the DB that should have been deleted, ' - u'skipping it'.format(id=cur_show_id, show=cur_show_name, - ep=episode_num(cur_season, cur_episode)), logger.DEBUG) + logger.debug(u'{id}: Tried loading {show} {ep} from the DB that should have been deleted, ' + u'skipping it', id=cur_show_id, show=cur_show_name, + ep=episode_num(cur_season, cur_episode)) continue - logger.log(u'{id}: Finished loading all episodes for {show} from the DB'.format - (show=cur_show_name, id=cur_show_id), logger.DEBUG) + logger.debug(u'{id}: Finished loading all episodes for {show} from the DB', show=cur_show_name, id=cur_show_id) return scanned_eps @@ -817,26 +802,20 @@ def load_episodes_from_indexer(self, seasons=None, tvapi=None): self.indexer_api = tvapi indexed_show = self.indexer_api[self.indexerid] except IndexerException as e: - logger.log( + logger.warning( u'{id}: {indexer} error, unable to update episodes.' - u' Message: {ex}'.format( + u' Message: {ex}', id=self.indexerid, indexer=indexerApi(self.indexer).name, - ex=e, - ), - logger.WARNING - ) + ex=e + ) raise - logger.log( - u'{id}: Loading all episodes from {indexer}{season_update}'.format( - id=self.indexerid, - indexer=indexerApi(self.indexer).name, - season_update=u' on seasons {seasons}'.format( - seasons=seasons - ) if seasons else u'' - ), - logger.DEBUG + logger.debug( + u'{id}: Loading all episodes from {indexer}{season_update}', + id=self.indexerid, + indexer=indexerApi(self.indexer).name, + season_update=u' on seasons {seasons}'.format(seasons=seasons) if seasons else u'' ) scanned_eps = {} @@ -857,16 +836,16 @@ def load_episodes_from_indexer(self, seasons=None, tvapi=None): if not ep: raise EpisodeNotFoundException except EpisodeNotFoundException: - logger.log(u'{id}: {indexer} object for {ep} is incomplete, skipping this episode'.format - (id=self.indexerid, indexer=indexerApi(self.indexer).name, - ep=episode_num(season, episode))) + logger.info(u'{id}: {indexer} object for {ep} is incomplete, skipping this episode', + id=self.indexerid, indexer=indexerApi(self.indexer).name, + ep=episode_num(season, episode)) continue else: try: ep.load_from_indexer(tvapi=self.indexer_api) except EpisodeDeletedException: - logger.log(u'{id}: The episode {ep} was deleted, skipping the rest of the load'.format - (id=self.indexerid, ep=episode_num(season, episode)), logger.DEBUG) + logger.debug(u'{id}: The episode {ep} was deleted, skipping the rest of the load', + id=self.indexerid, ep=episode_num(season, episode)) continue with ep.lock: @@ -880,7 +859,7 @@ def load_episodes_from_indexer(self, seasons=None, tvapi=None): # Done updating save last update date self.last_update_indexer = datetime.date.today().toordinal() - logger.log(u'{id}: Saving indexer changes to database'.format(id=self.indexerid), logger.DEBUG) + logger.debug(u'{id}: Saving indexer changes to database', id=self.indexerid) self.save_to_db() return scanned_eps @@ -1003,25 +982,26 @@ def make_ep_from_file(self, filepath): :rtype: Episode """ if not os.path.isfile(filepath): - logger.log(u"{0}: That isn't even a real file dude... {1}".format - (self.indexerid, filepath)) + logger.info(u"{indexer_id}: That isn't even a real file dude... 
{filepath}", + indexer_id=self.indexerid, filepath=filepath) return None - logger.log(u'{0}: Creating episode object from {1}'.format - (self.indexerid, filepath), logger.DEBUG) + logger.debug(u'{indexer_id}: Creating episode object from {filepath}', + indexer_id=self.indexerid, filepath=filepath) try: parse_result = NameParser(show=self, try_indexers=True, parse_method=( 'normal', 'anime')[self.is_anime]).parse(filepath) except (InvalidNameException, InvalidShowException) as error: - logger.log(u'{0}: {1}'.format(self.indexerid, error), logger.DEBUG) + logger.debug(u'{indexerid}: {error}', indexer_id=self.indexerid, error=error) return None episodes = [ep for ep in parse_result.episode_numbers if ep is not None] if not episodes: - logger.log(u'{0}: parse_result: {1}'.format(self.indexerid, parse_result)) - logger.log(u'{0}: No episode number found in {1}, ignoring it'.format - (self.indexerid, filepath), logger.WARNING) + logger.debug(u'{indexerid}: parse_result: {parse_result}', + indexerid=self.indexerid, parse_result=parse_result) + logger.debug(u'{indexerid}: No episode number found in {filepath}, ignoring it', + indexerid=self.indexerid, filepath=filepath) return None # for now lets assume that any episode in the show dir belongs to that show @@ -1030,8 +1010,9 @@ def make_ep_from_file(self, filepath): sql_l = [] for current_ep in episodes: - logger.log(u'{0}: {1} parsed to {2} {3}'.format - (self.indexerid, filepath, self.name, episode_num(season, current_ep)), logger.DEBUG) + logger.debug(u'{id}: {filepath} parsed to {series_name} {ep_num}', + id=self.indexerid, filepath=filepath, series_name=self.name, + ep_num=episode_num(season, current_ep)) check_quality_again = False same_file = False @@ -1043,17 +1024,18 @@ def make_ep_from_file(self, filepath): if not cur_ep: raise EpisodeNotFoundException except EpisodeNotFoundException: - logger.log(u'{0}: Unable to figure out what this file is, skipping {1}'.format - (self.indexerid, filepath), logger.ERROR) + logger.log(u'{indexerid}: Unable to figure out what this file is, skipping {filepath}', + indexerid=self.indexerid, filepath=filepath) continue else: # if there is a new file associated with this ep then re-check the quality if not cur_ep.location or os.path.normpath(cur_ep.location) != os.path.normpath(filepath): - logger.log( - u'{0}: The old episode had a different file associated with it, ' - u're-checking the quality using the new filename {1}'.format(self.indexerid, filepath), - logger.DEBUG) + logger.debug( + u'{indexerid}: The old episode had a different file associated with it, ' + u're-checking the quality using the new filename {filepath}', + indexerid=self.indexerid, filepath=filepath + ) check_quality_again = True with cur_ep.lock: @@ -1085,13 +1067,15 @@ def make_ep_from_file(self, filepath): old_ep_status = cur_ep.status new_quality = Quality.name_quality(filepath, self.is_anime) cur_ep.status = Quality.composite_status(DOWNLOADED, new_quality) - logger.log(u"{0}: Setting the status from '{1}' to '{2}' based on file: {3}. Reason: {4}".format - (self.indexerid, statusStrings[old_ep_status], statusStrings[cur_ep.status], - filepath, should_refresh_reason), logger.DEBUG) + logger.debug(u"{id}: Setting the status from '{status_old}' to '{status_cur}' " + u"based on file: {filepath}. 
Reason: {reason}", + id=self.indexerid, status_old=statusStrings[old_ep_status], + status_cur=statusStrings[cur_ep.status], + filepath=filepath, reason=should_refresh_reason) else: - logger.log(u"{0}: Not changing current status '{1}' based on file: {2}. " - u'Reason: {3}'.format(self.indexerid, statusStrings[cur_ep.status], - filepath, should_refresh_reason), logger.DEBUG) + logger.debug(u"{id}: Not changing current status '{status_string}' based on file: {filepath}. " + u'Reason: {should_refresh}', id=self.indexerid, status_string=statusStrings[cur_ep.status], + filepath=filepath, should_refresh=should_refresh_reason) with cur_ep.lock: sql_l.append(cur_ep.get_sql()) @@ -1108,7 +1092,7 @@ def make_ep_from_file(self, filepath): def _load_from_db(self): - logger.log(u'{id}: Loading show info from database'.format(id=self.indexerid), logger.DEBUG) + logger.debug(u'{id}: Loading show info from database', id=self.indexerid) main_db_con = db.DBConnection() sql_results = main_db_con.select(b'SELECT * FROM tv_shows WHERE indexer_id = ?', [self.indexerid]) @@ -1116,7 +1100,7 @@ def _load_from_db(self): if len(sql_results) > 1: raise MultipleShowsInDatabaseException() elif not sql_results: - logger.log(u'{0}: Unable to find the show in the database'.format(self.indexerid)) + logger.info(u'{indexerid}: Unable to find the show in the database', indexerid=self.indexerid) return else: self.indexer = int(sql_results[0][b'indexer'] or 0) @@ -1183,9 +1167,8 @@ def _load_from_db(self): ) if not sql_results: - logger.log(u'{id}: Unable to find IMDb info' - u' in the database: {show}'.format - (id=self.indexerid, show=self.name)) + logger.info(u'{id}: Unable to find IMDb info' + u' in the database: {show}', id=self.indexerid, show=self.name) return else: self.imdb_info = dict(zip(sql_results[0].keys(), sql_results[0])) @@ -1201,8 +1184,8 @@ def load_from_indexer(self, tvapi=None): if self.indexer == INDEXER_TVRAGE: return - logger.log(u'{0}: Loading show info from {1}'.format( - self.indexerid, indexerApi(self.indexer).name), logger.DEBUG) + logger.debug(u'{id}: Loading show info from {indexer_name}', + id=self.indexerid, indexer_name=indexerApi(self.indexer).name) self.indexer_api = tvapi indexed_show = self.indexer_api[self.indexerid] @@ -1250,22 +1233,22 @@ def load_imdb_info(self): self.imdb_id = helpers.title_to_imdb(self.name, self.start_year, imdb_api) if not self.imdb_id: - logger.log(u"{0}: Not loading show info from IMDb, " - u"because we don't know its ID.".format(self.indexerid)) + logger.info(u"{indexerid}: Not loading show info from IMDb, " + u"because we don't know its ID.", indexerid=self.indexerid) return # Make sure we only use the first ID self.imdb_id = self.imdb_id.split(',')[0] - logger.log(u'{0}: Loading show info from IMDb with ID: {1}'.format( - self.indexerid, self.imdb_id), logger.DEBUG) + logger.debug(u'{id}: Loading show info from IMDb with ID: {imdb_id}', + id=self.indexerid, imdb_id=self.imdb_id) imdb_obj = imdb_api.get_title_by_id(self.imdb_id) # If the show has no year, IMDb returned something we don't want if not imdb_obj.year: - logger.log(u'{0}: IMDb returned invalid info for {1}, skipping update.'.format( - self.indexerid, self.imdbid), logger.DEBUG) + logger.debug(u'{id}: IMDb returned invalid info for {imdb_id}, skipping update.', + id=self.indexerid, imdb_id=self.imdbid) return self.imdb_info = { @@ -1286,8 +1269,8 @@ def load_imdb_info(self): self.externals['imdb_id'] = self.imdb_id - logger.log(u'{0}: Obtained info from IMDb: {1}'.format( - self.indexerid, 
self.imdb_info), logger.DEBUG) + logger.debug(u'{id}: Obtained info from IMDb: {imdb_info}', + id=self.indexerid, imdb_info=self.imdb_info) def next_episode(self): """Return the next episode air date. @@ -1295,7 +1278,7 @@ def next_episode(self): :return: :rtype: datetime.date """ - logger.log(u'{0}: Finding the episode which airs next'.format(self.indexerid), logger.DEBUG) + logger.debug(u'{id}: Finding the episode which airs next', id=self.indexerid) cur_date = datetime.date.today().toordinal() if not self.next_aired or self.next_aired and cur_date > self.next_aired: @@ -1317,13 +1300,11 @@ def next_episode(self): [self.indexerid, datetime.date.today().toordinal(), UNAIRED, WANTED]) if sql_results is None or len(sql_results) == 0: - logger.log(u'{id}: No episode found... need to implement a show status'.format - (id=self.indexerid), logger.DEBUG) + logger.debug(u'{id}: No episode found... need to implement a show status', id=self.indexerid) self.next_aired = u'' else: - logger.log(u'{id}: Found episode {ep}'.format - (id=self.indexerid, ep=episode_num(sql_results[0][b'season'], sql_results[0][b'episode'])), - logger.DEBUG) + logger.debug(u'{id}: Found episode {ep}', + id=self.indexerid, ep=episode_num(sql_results[0][b'season'], sql_results[0][b'episode'])) self.next_aired = sql_results[0][b'airdate'] return self.next_aired @@ -1351,8 +1332,8 @@ def delete_show(self, full=False): # clear the cache image_cache_dir = os.path.join(app.CACHE_DIR, 'images') for cache_file in glob.glob(os.path.join(image_cache_dir, str(self.indexerid) + '.*')): - logger.log(u'{id}: Attempt to {action} cache file {cache_file}'.format - (id=self.indexerid, action=action, cache_file=cache_file)) + logger.info(u'{id}: Attempt to {action} cache file {cache_file}', + id=self.indexerid, action=action, cache_file=cache_file) try: if app.TRASH_REMOVE_SHOW: send2trash(cache_file) @@ -1360,53 +1341,52 @@ def delete_show(self, full=False): os.remove(cache_file) except OSError as e: - logger.log(u'{id}: Unable to {action} {cache_file}: {error_msg}'.format - (id=self.indexerid, action=action, cache_file=cache_file, error_msg=ex(e)), logger.WARNING) + logger.warning(u'{id}: Unable to {action} {cache_file}: {error_msg}', + id=self.indexerid, action=action, cache_file=cache_file, error_msg=ex(e)) # remove entire show folder if full: try: - logger.log(u'{id}: Attempt to {action} show folder {location}'.format - (id=self.indexerid, action=action, location=self.location)) + logger.info(u'{id}: Attempt to {action} show folder {location}', + id=self.indexerid, action=action, location=self.location) # check first the read-only attribute file_attribute = os.stat(self.location)[0] if not file_attribute & stat.S_IWRITE: # File is read-only, so make it writeable - logger.log(u'{id}: Attempting to make writeable the read only folder {location}'.format - (id=self.indexerid, location=self.location), logger.DEBUG) + logger.debug(u'{id}: Attempting to make writeable the read only folder {location}', + id=self.indexerid, location=self.location) try: os.chmod(self.location, stat.S_IWRITE) except OSError: - logger.log(u'{id}: Unable to change permissions of {location}'.format - (id=self.indexerid, location=self.location), logger.WARNING) + logger.warning(u'{id}: Unable to change permissions of {location}', + id=self.indexerid, location=self.location) if app.TRASH_REMOVE_SHOW: send2trash(self.location) else: shutil.rmtree(self.location) - logger.log(u'{id}: {action} show folder {location}'.format - (id=self.indexerid, action=action, 
location=self.raw_location)) + logger.info(u'{id}: {action} show folder {location}', + id=self.indexerid, action=action, location=self.raw_location) except ShowDirectoryNotFoundException: - logger.log(u'{id}: Show folder {location} does not exist. No need to {action}'.format - (id=self.indexerid, location=self.raw_location, action=action), logger.WARNING) + logger.warning(u'{id}: Show folder {location} does not exist. No need to {action}', + id=self.indexerid, location=self.raw_location, action=action) except OSError as e: - logger.log(u'{id}: Unable to {action} {location}. Error: {error_msg}'.format - (id=self.indexerid, action=action, location=self.raw_location, error_msg=ex(e)), - logger.WARNING) + logger.warning(u'{id}: Unable to {action} {location}. Error: {error_msg}', + id=self.indexerid, action=action, location=self.raw_location, error_msg=ex(e)) if app.USE_TRAKT and app.TRAKT_SYNC_WATCHLIST: - logger.log(u'{id}: Removing show {show} from Trakt watchlist'.format - (id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Removing show {show} from Trakt watchlist', + id=self.indexerid, show=self.name) notifiers.trakt_notifier.update_watchlist(self, update='remove') def populate_cache(self): """Populate image caching.""" cache_inst = image_cache.ImageCache() - logger.log(u'{id}: Checking & filling cache for show {show}'.format - (id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Checking & filling cache for show {show}', + id=self.indexerid, show=self.name) cache_inst.fill_cache(self) def refresh_dir(self): @@ -1426,8 +1406,8 @@ def refresh_dir(self): self.load_episodes_from_dir() # run through all locations from DB, check that they exist - logger.log(u'{id}: Loading all episodes from {show} with a location from the database'.format - (id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Loading all episodes from {show} with a location from the database', + id=self.indexerid, show=self.name) main_db_con = db.DBConnection() sql_results = main_db_con.select( @@ -1450,9 +1430,9 @@ def refresh_dir(self): if not cur_ep: raise EpisodeDeletedException except EpisodeDeletedException: - logger.log(u'{id:} Episode {show} {ep} was deleted while we were refreshing it, ' - u'moving on to the next one'.format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode)), logger.DEBUG) + logger.debug(u'{id:} Episode {show} {ep} was deleted while we were refreshing it, ' + u'moving on to the next one', + id=self.indexerid, show=self.name, ep=episode_num(season, episode)) continue # if the path doesn't exist or if it's not in our show dir @@ -1472,10 +1452,10 @@ def refresh_dir(self): else: new_status = app.EP_DEFAULT_DELETED_STATUS - logger.log(u"{id}: Location for {show} {ep} doesn't exist, " - u"removing it and changing our status to '{status}'".format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode), - status=statusStrings[new_status].upper()), logger.DEBUG) + logger.debug(u"{id}: Location for {show} {ep} doesn't exist, " + u"removing it and changing our status to '{status}'", + id=self.indexerid, show=self.name, ep=episode_num(season, episode), + status=statusStrings[new_status].upper()) cur_ep.status = new_status cur_ep.subtitles = '' cur_ep.subtitles_searchcount = 0 @@ -1487,13 +1467,13 @@ def refresh_dir(self): sql_l.append(cur_ep.get_sql()) - logger.log('{id}: Looking for hanging associated files for: {show} {ep} in: {location}'.format - (id=self.indexerid, show=self.name, ep=episode_num(season, 
episode), location=cur_loc)) + logger.info(u'{id}: Looking for hanging associated files for: {show} {ep} in: {location}', + id=self.indexerid, show=self.name, ep=episode_num(season, episode), location=cur_loc) related_files = post_processor.PostProcessor(cur_loc).list_associated_files( cur_loc, base_name_only=False, subfolders=True) if related_files: - logger.log(u'{id}: Found hanging associated files for {show} {ep}, deleting: {files}'.format + logger.warning(u'{id}: Found hanging associated files for {show} {ep}, deleting: {files}'.format (id=self.indexerid, show=self.name, ep=episode_num(season, episode), files=related_files), logger.WARNING) @@ -1501,9 +1481,10 @@ def refresh_dir(self): try: os.remove(related_file) except Exception as e: - logger.log( - u'{id}: Could not delete associated file: {related_file}. Error: {error_msg}'.format - (id=self.indexerid, related_file=related_file, error_msg=e), logger.WARNING) + logger.warning( + u'{id}: Could not delete associated file: {related_file}. Error: {error_msg}', + id=self.indexerid, related_file=related_file, error_msg=e + ) # Clean up any empty season folders after deletion of associated files helpers.delete_empty_folders(self.location) @@ -1515,33 +1496,33 @@ def refresh_dir(self): def download_subtitles(self): """Download subtitles.""" if not self.is_location_valid(): - logger.log(u"{id}: Show {show} location doesn't exist, can't download subtitles".format - (id=self.indexerid, show=self.name), logger.WARNING) + logger.warning(u"{id}: Show {show} location doesn't exist, can't download subtitles", + id=self.indexerid, show=self.name) return - logger.log(u'{id}: Downloading subtitles for {show}'.format(id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Downloading subtitles for {show}', id=self.indexerid, show=self.name) try: episodes = self.get_all_episodes(has_location=True) if not episodes: - logger.log(u'{id}: No episodes to download subtitles for {show}'.format - (id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: No episodes to download subtitles for {show}', + id=self.indexerid, show=self.name) return for episode in episodes: episode.download_subtitles() except Exception: - logger.log(u'{id}: Error occurred when downloading subtitles for show {show}'.format - (id=self.indexerid, show=self.name), logger.WARNING) - logger.log(traceback.format_exc(), logger.ERROR) + logger.warning(u'{id}: Error occurred when downloading subtitles for show {show}', + id=self.indexerid, show=self.name) + logger.error(traceback.format_exc()) def save_to_db(self): """Save to database.""" if not self.dirty: return - logger.log(u'{id}: Saving to database: {show}'.format(id=self.indexerid, show=self.name), logger.DEBUG) + logger.debug(u'{id}: Saving to database: {show}', id=self.indexerid, show=self.name) control_value_dict = {'indexer_id': self.indexerid} new_value_dict = {'indexer': self.indexer, @@ -1780,15 +1761,15 @@ def want_episode(self, season, episode, quality, forced_search=False, download_c """ # if the quality isn't one we want under any circumstances then just say no allowed_qualities, preferred_qualities = self.current_qualities - logger.log(u'{id}: Allowed, Preferred = [ {allowed} ] [ {preferred} ] Found = [ {found} ]'.format - (id=self.indexerid, allowed=self.__qualities_to_string(allowed_qualities), - preferred=self.__qualities_to_string(preferred_qualities), - found=self.__qualities_to_string([quality])), logger.DEBUG) + logger.debug(u'{id}: Allowed, Preferred = [ {allowed} ] [ {preferred} ] 
Found = [ {found} ]', + id=self.indexerid, allowed=self.__qualities_to_string(allowed_qualities), + preferred=self.__qualities_to_string(preferred_qualities), + found=self.__qualities_to_string([quality])) if not Quality.wanted_quality(quality, allowed_qualities, preferred_qualities): - logger.log(u"{id}: Ignoring found result for '{show}' {ep} with unwanted quality '{quality}'".format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode), - quality=Quality.qualityStrings[quality]), logger.DEBUG) + logger.debug(u"{id}: Ignoring found result for '{show}' {ep} with unwanted quality '{quality}'", + id=self.indexerid, show=self.name, ep=episode_num(season, episode), + quality=Quality.qualityStrings[quality]) return False main_db_con = db.DBConnection() @@ -1804,10 +1785,10 @@ def want_episode(self, season, episode, quality, forced_search=False, download_c b' AND episode = ?', [self.indexerid, season, episode]) if not sql_results or not len(sql_results): - logger.log(u'{id}: Unable to find a matching episode in database. ' - u"Ignoring found result for '{show}' {ep} with quality '{quality}'".format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode), - quality=Quality.qualityStrings[quality]), logger.DEBUG) + logger.debug(u'{id}: Unable to find a matching episode in database. ' + u"Ignoring found result for '{show}' {ep} with quality '{quality}'", + id=self.indexerid, show=self.name, ep=episode_num(season, episode), + quality=Quality.qualityStrings[quality]) return False ep_status = int(sql_results[0][b'status']) @@ -1817,19 +1798,18 @@ def want_episode(self, season, episode, quality, forced_search=False, download_c # if it's one of these then we want it as long as it's in our allowed initial qualities if ep_status == WANTED: - logger.log(u"{id}: '{show}' {ep} status is 'WANTED'. Accepting result with quality '{new_quality}'".format - (id=self.indexerid, status=ep_status_text, show=self.name, ep=episode_num(season, episode), - new_quality=Quality.qualityStrings[quality]), logger.DEBUG) + logger.debug(u"{id}: '{show}' {ep} status is 'WANTED'. Accepting result with quality '{new_quality}'", + id=self.indexerid, status=ep_status_text, show=self.name, ep=episode_num(season, episode), + new_quality=Quality.qualityStrings[quality]) return True should_replace, msg = Quality.should_replace(ep_status, cur_quality, quality, allowed_qualities, preferred_qualities, download_current_quality, forced_search, manually_searched) - logger.log(u"{id}: '{show}' {ep} status is: '{status}'. {action} result with quality '{new_quality}'. " - u"Reason: {msg}".format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode), - status=ep_status_text, action='Accepting' if should_replace else 'Ignoring', - new_quality=Quality.qualityStrings[quality], msg=msg), logger.DEBUG) + logger.debug(u"{id}: '{show}' {ep} status is: '{status}'. {action} result with quality '{new_quality}'. 
" + u"Reason: {msg}", id=self.indexerid, show=self.name, ep=episode_num(season, episode), + status=ep_status_text, action='Accepting' if should_replace else 'Ignoring', + new_quality=Quality.qualityStrings[quality], msg=msg) return should_replace def get_overview(self, ep_status, backlog_mode=False, manually_searched=False): @@ -1875,8 +1855,7 @@ def get_overview(self, ep_status, backlog_mode=False, manually_searched=False): else: return Overview.GOOD else: - logger.log(u'Could not parse episode status into a valid overview status: {status}'.format - (status=ep_status), logger.ERROR) + logger.error(u'Could not parse episode status into a valid overview status: {status}', status=ep_status) def get_backlogged_episodes(self, allowed_qualities, preferred_qualities, include_wanted=False): """Check how many episodes will be backlogged when changing qualities.""" @@ -1922,9 +1901,9 @@ def set_all_episodes_archived(self, final_status_only=False): if sql_list: main_db_con = db.DBConnection() main_db_con.mass_action(sql_list) - logger.log(u'Change all DOWNLOADED episodes to ARCHIVED ' - u'for show ID: {show}'.format(show=self.name), logger.DEBUG) + logger.debug(u'Change all DOWNLOADED episodes to ARCHIVED ' + u'for show ID: {show}', show=self.name) return True else: - logger.log(u'No DOWNLOADED episodes for show ID: {show}'.format(show=self.name), logger.DEBUG) + logger.debug(u'No DOWNLOADED episodes for show ID: {show}', show=self.name) return False From 918bdb8277081f1d1e028ef3607d571d46fd86ce Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sat, 18 Feb 2017 13:23:27 +0100 Subject: [PATCH 003/344] Added unicode_literals from future. * Fixed some of the logger indents. * Fixed some of the byte strings for db queries. --- medusa/tv/series.py | 50 ++++++++++++++++++++++++++++++--------------- 1 file changed, 34 insertions(+), 16 deletions(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index 3ab8e1f832..db6a6869a5 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -1,5 +1,23 @@ # coding=utf-8 -"""Series and Episode classes.""" +# Author: Nic Wolfe +# +# This file is part of Medusa. +# +# Medusa is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Medusa is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Medusa. If not, see . +"""Series classes.""" + +from __future__ import unicode_literals import copy import datetime @@ -328,7 +346,7 @@ def get_all_seasons(self, last_airdate=False): main_db_con = db.DBConnection() results = main_db_con.select(sql_selection, [self.indexerid]) - return {int(x['season']): int(x['number_of_episodes']) for x in results} + return {int(x[b'season']): int(x[b'number_of_episodes']) for x in results} def get_all_episodes(self, season=None, has_location=False): """Retrieve all episodes for this show given the specified filter. 
@@ -447,7 +465,7 @@ def get_episode(self, season=None, episode=None, filepath=None, no_create=False, episode = int(sql_results[0][b'episode']) season = int(sql_results[0][b'season']) logger.debug(u'{id}: Found season and episode which is {show} {ep}', - id=self.indexerid, show=self.name, ep=episode_num(season, episode)) + id=self.indexerid, show=self.name, ep=episode_num(season, episode)) elif len(sql_results) > 1: logger.error(u'{id}: Multiple entries found in show: {show} ', id=self.indexerid, show=self.name) @@ -780,8 +798,8 @@ def load_episodes_from_db(self, seasons=None): scanned_eps[cur_season][cur_episode] = True except EpisodeDeletedException: logger.debug(u'{id}: Tried loading {show} {ep} from the DB that should have been deleted, ' - u'skipping it', id=cur_show_id, show=cur_show_name, - ep=episode_num(cur_season, cur_episode)) + u'skipping it', id=cur_show_id, show=cur_show_name, + ep=episode_num(cur_season, cur_episode)) continue logger.debug(u'{id}: Finished loading all episodes for {show} from the DB', show=cur_show_name, id=cur_show_id) @@ -898,8 +916,8 @@ def _save_externals_to_db(self): for external in self.externals: if external in reverse_mappings and self.externals[external]: sql_l.append([b'INSERT OR IGNORE ' - 'INTO indexer_mapping (indexer_id, indexer, mindexer_id, mindexer) ' - 'VALUES (?,?,?,?)', + b'INTO indexer_mapping (indexer_id, indexer, mindexer_id, mindexer) ' + b'VALUES (?,?,?,?)', [self.indexerid, self.indexer, self.externals[external], @@ -987,7 +1005,7 @@ def make_ep_from_file(self, filepath): return None logger.debug(u'{indexer_id}: Creating episode object from {filepath}', - indexer_id=self.indexerid, filepath=filepath) + indexer_id=self.indexerid, filepath=filepath) try: parse_result = NameParser(show=self, try_indexers=True, parse_method=( @@ -999,9 +1017,9 @@ def make_ep_from_file(self, filepath): episodes = [ep for ep in parse_result.episode_numbers if ep is not None] if not episodes: logger.debug(u'{indexerid}: parse_result: {parse_result}', - indexerid=self.indexerid, parse_result=parse_result) + indexerid=self.indexerid, parse_result=parse_result) logger.debug(u'{indexerid}: No episode number found in {filepath}, ignoring it', - indexerid=self.indexerid, filepath=filepath) + indexerid=self.indexerid, filepath=filepath) return None # for now lets assume that any episode in the show dir belongs to that show @@ -1120,7 +1138,7 @@ def _load_from_db(self): if self.status is None: self.status = 'Unknown' - self.airs = sql_results[0]['airs'] + self.airs = sql_results[0][b'airs'] if self.airs is None or not network_timezones.test_timeformat(self.airs): self.airs = '' @@ -1468,15 +1486,14 @@ def refresh_dir(self): sql_l.append(cur_ep.get_sql()) logger.info(u'{id}: Looking for hanging associated files for: {show} {ep} in: {location}', - id=self.indexerid, show=self.name, ep=episode_num(season, episode), location=cur_loc) + id=self.indexerid, show=self.name, ep=episode_num(season, episode), location=cur_loc) related_files = post_processor.PostProcessor(cur_loc).list_associated_files( cur_loc, base_name_only=False, subfolders=True) if related_files: - logger.warning(u'{id}: Found hanging associated files for {show} {ep}, deleting: {files}'.format - (id=self.indexerid, show=self.name, ep=episode_num(season, episode), - files=related_files), - logger.WARNING) + logger.warning(u'{id}: Found hanging associated files for {show} {ep}, deleting: {files}', + id=self.indexerid, show=self.name, ep=episode_num(season, episode), + files=related_files) for related_file 
in related_files: try: os.remove(related_file) @@ -1512,6 +1529,7 @@ def download_subtitles(self): for episode in episodes: episode.download_subtitles() + # TODO: Change into a non catch all exception. except Exception: logger.warning(u'{id}: Error occurred when downloading subtitles for show {show}', id=self.indexerid, show=self.name) From ed05f0ef3c51ac75414a27e891ef419e66efaec3 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sat, 18 Feb 2017 13:25:17 +0100 Subject: [PATCH 004/344] Changed old style logger to new style for episode.py. * Added unicode_literals from future. * Fixed some byte strings for db calls. --- medusa/tv/episode.py | 354 +++++++++++++++++++++---------------------- 1 file changed, 176 insertions(+), 178 deletions(-) diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index 830635cdf1..cc52327bc6 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -15,9 +15,12 @@ # # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . -"""Series and Episode classes.""" +"""Episode classes.""" + +from __future__ import unicode_literals import datetime +import logging import os.path import re import shutil @@ -32,7 +35,6 @@ app, db, helpers, - logger, network_timezones, notifiers, post_processor, @@ -98,6 +100,8 @@ MILLIS_YEAR_1900 = datetime.datetime(year=1900, month=1, day=1).toordinal() +logger = logging.getLogger(__name__) + class Episode(TV): """Represent a TV Show episode.""" @@ -175,7 +179,7 @@ def from_filepath(filepath): return episode # only root episode has related_episodes except (InvalidNameException, InvalidShowException): - logger.log(u'Cannot create Episode from path {path}'.format(path=filepath), logger.WARNING) + logger.warning('Cannot create Episode from path {path}', path=filepath) @property def identifier(self): @@ -202,8 +206,8 @@ def location(self): @location.setter def location(self, value): - logger.log(u'{id}: Setter sets location to {location}'.format - (id=self.show.indexerid, location=value), logger.DEBUG) + logger.debug('{id}: Setter sets location to {location}', + id=self.show.indexerid, location=value) self._location = value self.file_size = os.path.getsize(value) if value and self.is_location_valid(value) else 0 @@ -227,13 +231,13 @@ def refresh_subtitles(self): ep_num = (episode_num(self.season, self.episode) or episode_num(self.season, self.episode, numbering='absolute')) if self.subtitles == current_subtitles: - logger.log(u'{id}: No changed subtitles for {show} {ep}. Current subtitles: {subs}'.format - (id=self.show.indexerid, show=self.show.name, ep=ep_num, subs=current_subtitles), logger.DEBUG) + logger.debug('{id}: No changed subtitles for {show} {ep}. Current subtitles: {subs}', + id=self.show.indexerid, show=self.show.name, ep=ep_num, subs=current_subtitles) else: - logger.log(u'{id}: Subtitle changes detected for this show {show} {ep}. Current subtitles: {subs}'.format - (id=self.show.indexerid, show=self.show.name, ep=ep_num, subs=current_subtitles), logger.DEBUG) + logger.debug('{id}: Subtitle changes detected for this show {show} {ep}. 
Current subtitles: {subs}', + id=self.show.indexerid, show=self.show.name, ep=ep_num, subs=current_subtitles) self.subtitles = current_subtitles if current_subtitles else [] - logger.log(u'{id}: Saving subtitles changes to database'.format(id=self.show.indexerid), logger.DEBUG) + logger.debug('{id}: Saving subtitles changes to database', id=self.show.indexerid) self.save_to_db() def download_subtitles(self, lang=None): @@ -243,11 +247,10 @@ def download_subtitles(self, lang=None): :type lang: string """ if not self.is_location_valid(): - logger.log(u"{id}: {show} {ep} file doesn't exist, can't download subtitles".format - (id=self.show.indexerid, show=self.show.name, - ep=(episode_num(self.season, self.episode) or episode_num(self.season, self.episode, - numbering='absolute'))), - logger.DEBUG) + logger.debug("{id}: {show} {ep} file doesn't exist, can't download subtitles", + id=self.show.indexerid, show=self.show.name, + ep=(episode_num(self.season, self.episode) or episode_num(self.season, self.episode, + numbering='absolute'))) return new_subtitles = subtitles.download_subtitles(self, lang=lang) @@ -256,22 +259,22 @@ def download_subtitles(self, lang=None): self.subtitles_searchcount += 1 if self.subtitles_searchcount else 1 self.subtitles_lastsearch = datetime.datetime.now().strftime(dateTimeFormat) - logger.log(u'{id}: Saving last subtitles search to database'.format(id=self.show.indexerid), logger.DEBUG) + logger.debug('{id}: Saving last subtitles search to database', id=self.show.indexerid) self.save_to_db() if new_subtitles: subtitle_list = ', '.join([subtitles.name_from_code(code) for code in new_subtitles]) - logger.log(u'{id}: Downloaded {subs} subtitles for {show} {ep}'.format - (id=self.show.indexerid, subs=subtitle_list, show=self.show.name, + logger.info('{id}: Downloaded {subs} subtitles for {show} {ep}', + id=self.show.indexerid, subs=subtitle_list, show=self.show.name, ep=(episode_num(self.season, self.episode) or - episode_num(self.season, self.episode, numbering='absolute')))) + episode_num(self.season, self.episode, numbering='absolute'))) notifiers.notify_subtitle_download(self.pretty_name(), subtitle_list) else: - logger.log(u'{id}: No subtitles found for {show} {ep}'.format - (id=self.show.indexerid, show=self.show.name, + logger.info('{id}: No subtitles found for {show} {ep}', + id=self.show.indexerid, show=self.show.name, ep=(episode_num(self.season, self.episode) or - episode_num(self.season, self.episode, numbering='absolute')))) + episode_num(self.season, self.episode, numbering='absolute'))) return new_subtitles @@ -319,9 +322,8 @@ def _specify_episode(self, season, episode): try: self.__load_from_nfo(self.location) except NoNFOException: - logger.log(u'{id}: There was an error loading the NFO for episode {show} {ep}'.format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode)), - logger.ERROR) + logger.error('{id}: There was an error loading the NFO for episode {show} {ep}', + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode)) # if we tried loading it from NFO and didn't find the NFO, try the Indexers if not self.hasnfo: @@ -332,7 +334,7 @@ def _specify_episode(self, season, episode): # if we failed SQL *and* NFO, Indexers then fail if not result: - raise EpisodeNotFoundException(u"{id}: Couldn't find episode {show} {ep}".format + raise EpisodeNotFoundException("{id}: Couldn't find episode {show} {ep}".format (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode))) @@ -362,9 +364,8 
@@ def load_from_db(self, season, episode): if len(sql_results) > 1: raise MultipleEpisodesInDatabaseException('Your DB has two records for the same show somehow.') elif not sql_results: - logger.log(u'{id}: {show} {ep} not found in the database'.format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(self.season, self.episode)), - logger.DEBUG) + logger.debug('{id}: {show} {ep} not found in the database', + id=self.show.indexerid, show=self.show.name, ep=episode_num(self.season, self.episode)) return False else: if sql_results[0][b'name']: @@ -455,42 +456,41 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season my_ep = show[season][episode] except (IndexerError, IOError) as e: - logger.log(u'{id}: {indexer} threw up an error: {error_msg}'.format - (id=self.show.indexerid, indexer=indexerApi(self.indexer).name, error_msg=ex(e)), - logger.WARNING) + logger.warning('{id}: {indexer} threw up an error: {error_msg}', + id=self.show.indexerid, indexer=indexerApi(self.indexer).name, error_msg=ex(e)), + # if the episode is already valid just log it, if not throw it up if self.name: - logger.log( - u'{id}: {indexer} timed out but we have enough info from other sources, allowing the error'.format - (id=self.show.indexerid, indexer=indexerApi(self.indexer).name), logger.DEBUG) + logger.debug( + '{id}: {indexer} timed out but we have enough info from other sources, allowing the error', + id=self.show.indexerid, indexer=indexerApi(self.indexer).name) return else: - logger.log(u'{id}: {indexer} timed out, unable to create the episode'.format - (id=self.show.indexerid, indexer=indexerApi(self.indexer).name), logger.WARNING) + logger.warning('{id}: {indexer} timed out, unable to create the episode', + id=self.show.indexerid, indexer=indexerApi(self.indexer).name) return False except (IndexerEpisodeNotFound, IndexerSeasonNotFound): - logger.log(u'{id}: Unable to find the episode on {indexer}. Deleting it from db'.format - (id=self.show.indexerid, indexer=indexerApi(self.indexer).name), logger.DEBUG) + logger.debug('{id}: Unable to find the episode on {indexer}. Deleting it from db', + id=self.show.indexerid, indexer=indexerApi(self.indexer).name) # if I'm no longer on the Indexers but I once was then delete myself from the DB if self.indexerid != -1: self.delete_episode() return if getattr(my_ep, 'episodename', None) is None: - logger.log(u'{id}: {show} {ep} has no name on {indexer}. Setting to an empty string'.format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - indexer=indexerApi(self.indexer).name)) + logger.info('{id}: {show} {ep} has no name on {indexer}. 
Setting to an empty string', + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + indexer=indexerApi(self.indexer).name) setattr(my_ep, 'episodename', '') if getattr(my_ep, 'absolute_number', None) is None: - logger.log(u'{id}: {show} {ep} has no absolute number on {indexer}'.format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - indexer=indexerApi(self.indexer).name), logger.DEBUG) + logger.debug('{id}: {show} {ep} has no absolute number on {indexer}', + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + indexer=indexerApi(self.indexer).name) else: - logger.log(u'{id}: {show} {ep} has absolute number: {absolute} '.format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - absolute=my_ep['absolute_number']), - logger.DEBUG) + logger.debug('{id}: {show} {ep} has absolute number: {absolute} ', + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + absolute=my_ep['absolute_number']) self.absolute_number = int(my_ep['absolute_number']) self.name = getattr(my_ep, 'episodename', '') @@ -521,9 +521,9 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season try: self.airdate = datetime.date(raw_airdate[0], raw_airdate[1], raw_airdate[2]) except (ValueError, IndexError): - logger.log(u'{id}: Malformed air date of {aired} retrieved from {indexer} for {show} {ep}'.format - (id=self.show.indexerid, aired=firstaired, indexer=indexerApi(self.indexer).name, - show=self.show.name, ep=episode_num(season, episode)), logger.WARNING) + logger.warning('{id}: Malformed air date of {aired} retrieved from {indexer} for {show} {ep}', + id=self.show.indexerid, aired=firstaired, indexer=indexerApi(self.indexer).name, + show=self.show.name, ep=episode_num(season, episode)) # if I'm incomplete on the indexer but I once was complete then just delete myself from the DB for now if self.indexerid != -1: self.delete_episode() @@ -532,8 +532,8 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season # early conversion to int so that episode doesn't get marked dirty self.indexerid = getattr(my_ep, 'id', None) if self.indexerid is None: - logger.log(u'{id}: Failed to retrieve ID from {indexer}'.format - (id=self.show.indexerid, indexer=indexerApi(self.indexer).name), logger.ERROR) + logger.error('{id}: Failed to retrieve ID from {indexer}', + id=self.show.indexerid, indexer=indexerApi(self.indexer).name) if self.indexerid != -1: self.delete_episode() return False @@ -542,15 +542,14 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season if all([not self.show.is_location_valid(), not app.CREATE_MISSING_SHOW_DIRS, not app.ADD_SHOWS_WO_DIR]): - logger.log(u"{id}: Show {show} location '{location}' is missing. Keeping current episode statuses" - .format(id=self.show.indexerid, show=self.show.name, location=self.show.raw_location), - logger.WARNING) + logger.warning("{id}: Show {show} location '{location}' is missing. 
Keeping current episode statuses", + id=self.show.indexerid, show=self.show.name, location=self.show.raw_location) return if self.location: - logger.log(u"{id}: {show} {ep} has status '{status}' and location {location}".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - status=statusStrings[self.status].upper(), location=self.location), logger.DEBUG) + logger.debug("{id}: {show} {ep} has status '{status}' and location {location}", + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + status=statusStrings[self.status].upper(), location=self.location) if not os.path.isfile(self.location): if (self.airdate >= datetime.date.today() or self.airdate == datetime.date.fromordinal(1)) and \ @@ -560,22 +559,22 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season # If is a leaked episode and user manually snatched, it will respect status # If is a fake (manually snatched), when user set as FAILED, status will be WANTED # and code below will make it UNAIRED again - logger.log(u"{id}: {show} {ep} airs in the future or has no airdate, marking it '{status}'".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - status=statusStrings[UNAIRED].upper()), logger.DEBUG) + logger.debug("{id}: {show} {ep} airs in the future or has no airdate, marking it '{status}'", + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + status=statusStrings[UNAIRED].upper()) self.status = UNAIRED elif self.status in (UNAIRED, UNKNOWN): # Only do UNAIRED/UNKNOWN, it could already be snatched/ignored/skipped, # or downloaded/archived to disconnected media new_status = self.show.default_ep_status if self.season > 0 else SKIPPED # auto-skip specials - logger.log(u"{id}: {show} {ep} has already aired, marking it '{status}'".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - status=statusStrings[new_status].upper()), logger.DEBUG) + logger.debug("{id}: {show} {ep} has already aired, marking it '{status}'", + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + status=statusStrings[new_status].upper()) self.status = new_status else: - logger.log(u"{id}: {show} {ep} status untouched: '{status}'".format - (id=self.show.indexerid, show=self.show.name, - ep=episode_num(season, episode), status=statusStrings[self.status].upper()), logger.DEBUG) + logger.debug("{id}: {show} {ep} status untouched: '{status}'", + id=self.show.indexerid, show=self.show.name, + ep=episode_num(season, episode), status=statusStrings[self.status].upper()) # We only change the episode's status if a file exists and the status is not SNATCHED|DOWNLOADED|ARCHIVED elif helpers.is_media_file(self.location): @@ -583,33 +582,33 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season Quality.ARCHIVED + Quality.SNATCHED_BEST: old_status = self.status self.status = Quality.status_from_name(self.location, anime=self.show.is_anime) - logger.log(u"{id}: {show} {ep} status changed from '{old_status}' to '{new_status}' " - u"as current status is not SNATCHED|DOWNLOADED|ARCHIVED".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - old_status=statusStrings[old_status].upper(), - new_status=statusStrings[self.status].upper()), logger.DEBUG) + logger.debug("{id}: {show} {ep} status changed from '{old_status}' to '{new_status}' " + "as current status is not SNATCHED|DOWNLOADED|ARCHIVED", + 
id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + old_status=statusStrings[old_status].upper(), + new_status=statusStrings[self.status].upper()) else: - logger.log(u"{id}: {show} {ep} status untouched: '{status}'".format - (id=self.show.indexerid, show=self.show.name, - ep=episode_num(season, episode), status=statusStrings[self.status].upper()), logger.DEBUG) + logger.debug("{id}: {show} {ep} status untouched: '{status}'", + id=self.show.indexerid, show=self.show.name, + ep=episode_num(season, episode), status=statusStrings[self.status].upper()) # shouldn't get here probably else: - logger.log(u"{id}: {show} {ep} status changed from '{old_status}' to 'UNKNOWN'".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), - old_status=statusStrings[self.status].upper()), logger.WARNING) + logger.warning("{id}: {show} {ep} status changed from '{old_status}' to 'UNKNOWN'", + id=self.show.indexerid, show=self.show.name, ep=episode_num(season, episode), + old_status=statusStrings[self.status].upper()) self.status = UNKNOWN def __load_from_nfo(self, location): if not self.show.is_location_valid(): - logger.log(u'{id}: The show location {location} is missing, unable to load metadata'.format - (id=self.show.indexerid, location=location), logger.WARNING) + logger.warning('{id}: The show location {location} is missing, unable to load metadata', + id=self.show.indexerid, location=location) return - logger.log(u'{id}: Loading episode details from the NFO file associated with {location}'.format - (id=self.show.indexerid, location=location), logger.DEBUG) + logger.debug('{id}: Loading episode details from the NFO file associated with {location}', + id=self.show.indexerid, location=location) self.location = location @@ -617,36 +616,36 @@ def __load_from_nfo(self, location): if self.status == UNKNOWN and helpers.is_media_file(self.location): self.status = Quality.status_from_name(self.location, anime=self.show.is_anime) - logger.log(u"{id}: {show} {ep} status changed from 'UNKNOWN' to '{new_status}'".format - (id=self.show.indexerid, show=self.show.name, ep=episode_num(self.season, self.episode), - new_status=self.status), logger.DEBUG) + logger.debug("{id}: {show} {ep} status changed from 'UNKNOWN' to '{new_status}'", + id=self.show.indexerid, show=self.show.name, ep=episode_num(self.season, self.episode), + new_status=self.status) nfo_file = replace_extension(self.location, 'nfo') - logger.log(u'{id}: Using NFO name {nfo}'.format(id=self.show.indexerid, nfo=nfo_file), logger.DEBUG) + logger.debug('{id}: Using NFO name {nfo}', id=self.show.indexerid, nfo=nfo_file) if os.path.isfile(nfo_file): try: show_xml = ETree.ElementTree(file=nfo_file) except (SyntaxError, ValueError) as e: - logger.log(u'{id}: Error loading the NFO, backing up the NFO and skipping for now: '.format - (id=self.show.indexerid, error_msg=ex(e)), logger.ERROR) + logger.error('{id}: Error loading the NFO, backing up the NFO and skipping for now: {error_msg}', + id=self.show.indexerid, error_msg=ex(e)) try: os.rename(nfo_file, nfo_file + '.old') except Exception as e: - logger.log(u"{id}: Failed to rename your episode's NFO file. " - u'You need to delete it or fix it: {error_msg}'.format - (id=self.show.indexerid, error_msg=ex(e)), logger.WARNING) + logger.warning("{id}: Failed to rename your episode's NFO file. 
" + 'You need to delete it or fix it: {error_msg}', + id=self.show.indexerid, error_msg=ex(e)) raise NoNFOException('Error in NFO format') for ep_details in list(show_xml.iter('episodedetails')): if (ep_details.findtext('season') is None or int(ep_details.findtext('season')) != self.season or ep_details.findtext('episode') is None or int(ep_details.findtext('episode')) != self.episode): - logger.log(u'{id}: NFO has an block for a different episode - ' - u'wanted {ep_wanted} but got {ep_found}'.format - (id=self.show.indexerid, ep_wanted=episode_num(self.season, self.episode), - ep_found=episode_num(ep_details.findtext('season'), - ep_details.findtext('episode'))), logger.DEBUG) + logger.debug('{id}: NFO has an block for a different episode - ' + 'wanted {ep_wanted} but got {ep_found}', + id=self.show.indexerid, ep_wanted=episode_num(self.season, self.episode), + ep_found=episode_num(ep_details.findtext('season'), + ep_details.findtext('episode'))) continue if ep_details.findtext('title') is None or ep_details.findtext('aired') is None: @@ -690,17 +689,17 @@ def __str__(self): :return: :rtype: unicode """ - result = u'' - result += u'%r - %r - %r\n' % (self.show.name, episode_num(self.season, self.episode), self.name) - result += u'location: %r\n' % self.location - result += u'description: %r\n' % self.description - result += u'subtitles: %r\n' % u','.join(self.subtitles) - result += u'subtitles_searchcount: %r\n' % self.subtitles_searchcount - result += u'subtitles_lastsearch: %r\n' % self.subtitles_lastsearch - result += u'airdate: %r (%r)\n' % (self.airdate.toordinal(), self.airdate) - result += u'hasnfo: %r\n' % self.hasnfo - result += u'hastbn: %r\n' % self.hastbn - result += u'status: %r\n' % self.status + result = '' + result += '%r - %r - %r\n' % (self.show.name, episode_num(self.season, self.episode), self.name) + result += 'location: %r\n' % self.location + result += 'description: %r\n' % self.description + result += 'subtitles: %r\n' % ','.join(self.subtitles) + result += 'subtitles_searchcount: %r\n' % self.subtitles_searchcount + result += 'subtitles_lastsearch: %r\n' % self.subtitles_lastsearch + result += 'airdate: %r (%r)\n' % (self.airdate.toordinal(), self.airdate) + result += 'hasnfo: %r\n' % self.hasnfo + result += 'hastbn: %r\n' % self.hastbn + result += 'status: %r\n' % self.status return result def to_json(self, detailed=True): @@ -752,8 +751,8 @@ def to_json(self, detailed=True): def create_meta_files(self): """Create episode metadata files.""" if not self.show.is_location_valid(): - logger.log(u'{id}: The show dir is missing, unable to create metadata'.format(id=self.show.indexerid), - logger.WARNING) + logger.warning('{id}: The show dir is missing, unable to create metadata', id=self.show.indexerid), + return for metadata_provider in app.metadata_provider_dict.values(): @@ -761,7 +760,7 @@ def create_meta_files(self): self.__create_thumbnail(metadata_provider) if self.check_for_meta_files(): - logger.log(u'{id}: Saving metadata changes to database'.format(id=self.show.indexerid), logger.DEBUG) + logger.debug('{id}: Saving metadata changes to database', id=self.show.indexerid) self.save_to_db() def __create_nfo(self, metadata_provider): @@ -786,19 +785,19 @@ def __create_thumbnail(self, metadata_provider): def delete_episode(self): """Delete episode from database.""" - logger.log(u'{id}: Deleting {show} {ep} from the DB'.format - (id=self.show.indexerid, show=self.show.name, - ep=episode_num(self.season, self.episode)), logger.DEBUG) + logger.debug('{id}: Deleting 
{show} {ep} from the DB', + id=self.show.indexerid, show=self.show.name, + ep=episode_num(self.season, self.episode)) # remove myself from the show dictionary if self.show.get_episode(self.season, self.episode, no_create=True) == self: - logger.log(u"{id}: Removing myself from my show's list".format - (id=self.show.indexerid), logger.DEBUG) + logger.debug("{id}: Removing myself from my show's list", + id=self.show.indexerid) del self.show.episodes[self.season][self.episode] # delete myself from the DB - logger.log(u'{id}: Deleting myself from the database'.format - (id=self.show.indexerid), logger.DEBUG) + logger.debug('{id}: Deleting myself from the database', + id=self.show.indexerid) main_db_con = db.DBConnection() sql = b'DELETE FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?' main_db_con.action(sql, [self.show.indexerid, self.season, self.episode]) @@ -809,8 +808,8 @@ def get_sql(self): """Create SQL queue for this episode if any of its data has been changed since the last save.""" try: if not self.dirty: - logger.log(u'{id}: Not creating SQL queue - record is not dirty'.format - (id=self.show.indexerid), logger.DEBUG) + logger.debug('{id}: Not creating SQL queue - record is not dirty', + id=self.show.indexerid) return main_db_con = db.DBConnection() @@ -938,40 +937,39 @@ def get_sql(self): self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, self.absolute_number, self.version, self.release_group]] except Exception as e: - logger.log(u'{id}: Error while updating database: {error_msg}'.format - (id=self.show.indexerid, error_msg=repr(e)), logger.ERROR) + logger.error('{id}: Error while updating database: {error_msg}', id=self.show.indexerid, error_msg=repr(e)) def save_to_db(self): """Save this episode to the database if any of its data has been changed since the last save.""" if not self.dirty: return - new_value_dict = {'indexerid': self.indexerid, - 'indexer': self.indexer, - 'name': self.name, - 'description': self.description, - 'subtitles': ','.join(self.subtitles), - 'subtitles_searchcount': self.subtitles_searchcount, - 'subtitles_lastsearch': self.subtitles_lastsearch, - 'airdate': self.airdate.toordinal(), - 'hasnfo': self.hasnfo, - 'hastbn': self.hastbn, - 'status': self.status, - 'location': self.location, - 'file_size': self.file_size, - 'release_name': self.release_name, - 'is_proper': self.is_proper, - 'absolute_number': self.absolute_number, - 'version': self.version, - 'release_group': self.release_group} - - control_value_dict = {'showid': self.show.indexerid, - 'season': self.season, - 'episode': self.episode} + new_value_dict = {b'indexerid': self.indexerid, + b'indexer': self.indexer, + b'name': self.name, + b'description': self.description, + b'subtitles': ','.join(self.subtitles), + b'subtitles_searchcount': self.subtitles_searchcount, + b'subtitles_lastsearch': self.subtitles_lastsearch, + b'airdate': self.airdate.toordinal(), + b'hasnfo': self.hasnfo, + b'hastbn': self.hastbn, + b'status': self.status, + b'location': self.location, + b'file_size': self.file_size, + b'release_name': self.release_name, + b'is_proper': self.is_proper, + b'absolute_number': self.absolute_number, + b'version': self.version, + b'release_group': self.release_group} + + control_value_dict = {b'showid': self.show.indexerid, + b'season': self.season, + b'episode': self.episode} # use a custom update/insert method to get the data into the DB main_db_con = db.DBConnection() - main_db_con.upsert('tv_episodes', new_value_dict, 
control_value_dict) + main_db_con.upsert(b'tv_episodes', new_value_dict, control_value_dict) self.loaded = False self.reset_dirty() @@ -1071,7 +1069,7 @@ def release_group(show, name): try: parse_result = NameParser(show=show, naming_pattern=True).parse(name) except (InvalidNameException, InvalidShowException) as e: - logger.log(u'Unable to parse release_group: {error_msg}'.format(error_msg=ex(e)), logger.DEBUG) + logger.debug('Unable to parse release_group: {error_msg}', error_msg=ex(e)) return '' if not parse_result.release_group: @@ -1115,7 +1113,7 @@ def release_group(show, name): # try to get the release encoder to comply with scene naming standards encoder = Quality.scene_quality_from_name(self.release_name.replace(rel_grp[relgrp], ''), ep_qual) if encoder: - logger.log(u'Found codec for {show} {ep}'.format(show=show_name, ep=ep_name), logger.DEBUG) + logger.debug('Found codec for {show} {ep}', show=show_name, ep=ep_name) return { '%SN': show_name, @@ -1210,8 +1208,8 @@ def _format_pattern(self, pattern=None, multi=None, anime_type=None): # if there's no release group in the db, let the user know we replaced it if replace_map['%RG'] and replace_map['%RG'] != app.UNKNOWN_RELEASE_GROUP: if not hasattr(self, 'release_group') or not self.release_group: - logger.log(u'{id}: Episode has no release group, replacing it with {rg}'.format - (id=self.show.indexerid, rg=replace_map['%RG']), logger.DEBUG) + logger.debug('{id}: Episode has no release group, replacing it with {rg}', + id=self.show.indexerid, rg=replace_map['%RG']) self.release_group = replace_map['%RG'] # if release_group is not in the db, put it there # if there's no release name then replace it with a reasonable facsimile @@ -1342,8 +1340,8 @@ def _format_pattern(self, pattern=None, multi=None, anime_type=None): result_name = self.__format_string(result_name, replace_map) - logger.log(u'{id}: Formatting pattern: {pattern} -> {result_name}'.format - (id=self.show.indexerid, pattern=pattern, result_name=result_name), logger.DEBUG) + logger.debug('{id}: Formatting pattern: {pattern} -> {result_name}', + id=self.show.indexerid, pattern=pattern, result_name=result_name) return result_name @@ -1429,8 +1427,8 @@ def formatted_filename(self, pattern=None, multi=None, anime_type=None): def rename(self): """Rename an episode file and all related files to the location and filename as specified in naming settings.""" if not self.is_location_valid(): - logger.log(u"{id} Can't perform rename on {location} when it doesn't exist, skipping".format - (id=self.indexerid, location=self.location), logger.WARNING) + logger.warning("{id} Can't perform rename on {location} when it doesn't exist, skipping", + id=self.indexerid, location=self.location) return proper_path = self.proper_path() @@ -1445,13 +1443,13 @@ def rename(self): if absolute_current_path_no_ext.startswith(self.show.location): current_path = absolute_current_path_no_ext[len(self.show.location):] - logger.log(u'{id}: Renaming/moving episode from the base path {location} to {new_location}'.format - (id=self.indexerid, location=self.location, new_location=absolute_proper_path), logger.DEBUG) + logger.debug('{id}: Renaming/moving episode from the base path {location} to {new_location}', + id=self.indexerid, location=self.location, new_location=absolute_proper_path) # if it's already named correctly then don't do anything if proper_path == current_path: - logger.log(u'{id}: File {location} is already named correctly, skipping'.format - (id=self.indexerid, location=self.location), 
logger.DEBUG) + logger.debug('{id}: File {location} is already named correctly, skipping', + id=self.indexerid, location=self.location) return related_files = post_processor.PostProcessor(self.location).list_associated_files( @@ -1462,8 +1460,8 @@ def rename(self): related_subs = post_processor.PostProcessor( self.location).list_associated_files(app.SUBTITLES_DIR, subtitles_only=True, subfolders=True) - logger.log(u'{id} Files associated to {location}: {related_files}'.format - (id=self.indexerid, location=self.location, related_files=related_files), logger.DEBUG) + logger.debug('{id} Files associated to {location}: {related_files}', + id=self.indexerid, location=self.location, related_files=related_files) # move the ep file result = helpers.rename_ep_file(self.location, absolute_proper_path, absolute_current_path_no_ext_length) @@ -1482,16 +1480,16 @@ def rename(self): cur_result = helpers.rename_ep_file(cur_related_file, proper_related_path, absolute_current_path_no_ext_length + len(subfolder)) if not cur_result: - logger.log(u'{id}: Unable to rename file {cur_file}'.format - (id=self.indexerid, cur_file=cur_related_file), logger.WARNING) + logger.warning('{id}: Unable to rename file {cur_file}', + id=self.indexerid, cur_file=cur_related_file) for cur_related_sub in related_subs: absolute_proper_subs_path = os.path.join(app.SUBTITLES_DIR, self.formatted_filename()) cur_result = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path, absolute_current_path_no_ext_length) if not cur_result: - logger.log(u'{id}: Unable to rename file {cur_file}'.format - (id=self.indexerid, cur_file=cur_related_sub), logger.WARNING) + logger.warning('{id}: Unable to rename file {cur_file}', + id=self.indexerid, cur_file=cur_related_sub) # save the ep with self.lock: @@ -1538,22 +1536,22 @@ def airdate_modify_stamp(self): if filemtime != airdatetime: airdatetime = airdatetime.timetuple() - logger.log(u"{id}: About to modify date of '{location}' to show air date {air_date}".format - (id=self.show.indexerid, location=self.location, - air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime)), logger.DEBUG) + logger.debug("{id}: About to modify date of '{location}' to show air date {air_date}", + id=self.show.indexerid, location=self.location, + air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime)) try: if helpers.touch_file(self.location, time.mktime(airdatetime)): - logger.log(u"{id}: Changed modify date of '{location}' to show air date {air_date}".format - (id=self.show.indexerid, location=os.path.basename(self.location), - air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime))) + logger.info("{id}: Changed modify date of '{location}' to show air date {air_date}", + id=self.show.indexerid, location=os.path.basename(self.location), + air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime)) else: - logger.log(u"{id}: Unable to modify date of '{location}' to show air date {air_date}".format - (id=self.show.indexerid, location=os.path.basename(self.location), - air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime)), logger.WARNING) + logger.warning("{id}: Unable to modify date of '{location}' to show air date {air_date}", + id=self.show.indexerid, location=os.path.basename(self.location), + air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime)) except Exception: - logger.log(u"{id}: Failed to modify date of '{location}' to show air date {air_date}".format - (id=self.show.indexerid, location=os.path.basename(self.location), - air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime)), 
logger.WARNING) + logger.warning("{id}: Failed to modify date of '{location}' to show air date {air_date}", + id=self.show.indexerid, location=os.path.basename(self.location), + air_date=time.strftime('%b %d,%Y (%H:%M)', airdatetime)) except Exception: - logger.log(u"{id}: Failed to modify date of '{location}'".format - (id=self.show.indexerid, location=os.path.basename(self.location)), logger.WARNING) + logger.warning("{id}: Failed to modify date of '{location}'", + id=self.show.indexerid, location=os.path.basename(self.location)) From d9ab6ef4f7de058385fd3f4a9203c49b6de6c704 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sat, 18 Feb 2017 13:33:31 +0100 Subject: [PATCH 005/344] Tnx codacy! --- medusa/tv/episode.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index cc52327bc6..c0e3330b9d 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -457,7 +457,7 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season except (IndexerError, IOError) as e: logger.warning('{id}: {indexer} threw up an error: {error_msg}', - id=self.show.indexerid, indexer=indexerApi(self.indexer).name, error_msg=ex(e)), + id=self.show.indexerid, indexer=indexerApi(self.indexer).name, error_msg=ex(e)) # if the episode is already valid just log it, if not throw it up if self.name: @@ -751,8 +751,7 @@ def to_json(self, detailed=True): def create_meta_files(self): """Create episode metadata files.""" if not self.show.is_location_valid(): - logger.warning('{id}: The show dir is missing, unable to create metadata', id=self.show.indexerid), - + logger.warning('{id}: The show dir is missing, unable to create metadata', id=self.show.indexerid) return for metadata_provider in app.metadata_provider_dict.values(): From fd2ab6f0fefc5fb5a3c965bc014d765a3c5475c7 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sat, 18 Feb 2017 13:39:17 +0100 Subject: [PATCH 006/344] Fixed some alignments on tvseries. --- medusa/tv/series.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index db6a6869a5..7b1c20b168 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -453,12 +453,12 @@ def get_episode(self, season=None, episode=None, filepath=None, no_create=False, b'WHERE showid = ? AND absolute_number = ? AND season != 0' sql_args = [self.indexerid, absolute_number] logger.debug(u'{id}: Season and episode lookup for {show} using absolute number {absolute}', - id=self.indexerid, absolute=absolute_number, show=self.name) + id=self.indexerid, absolute=absolute_number, show=self.name) elif air_date: sql = b'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?' sql_args = [self.indexerid, air_date.toordinal()] logger.debug(u'{id}: Season and episode lookup for {show} using air date {air_date}', - id=self.indexerid, air_date=air_date, show=self.name) + id=self.indexerid, air_date=air_date, show=self.name) sql_results = main_db_con.select(sql, sql_args) if sql else [] if len(sql_results) == 1: @@ -823,10 +823,10 @@ def load_episodes_from_indexer(self, seasons=None, tvapi=None): logger.warning( u'{id}: {indexer} error, unable to update episodes.' 
u' Message: {ex}', - id=self.indexerid, - indexer=indexerApi(self.indexer).name, - ex=e - ) + id=self.indexerid, + indexer=indexerApi(self.indexer).name, + ex=e + ) raise logger.debug( From bd0f6157017aa18a36d531f5a95fc01b01d94163 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sat, 18 Feb 2017 19:40:10 +0100 Subject: [PATCH 007/344] Changed old style logs to new style for queue.py. --- medusa/search/queue.py | 133 +++++++++++++++++++++++------------------ 1 file changed, 74 insertions(+), 59 deletions(-) diff --git a/medusa/search/queue.py b/medusa/search/queue.py index 143f325561..07b3102520 100644 --- a/medusa/search/queue.py +++ b/medusa/search/queue.py @@ -19,11 +19,12 @@ from __future__ import unicode_literals +import logging import threading import time import traceback -from .. import app, common, failed_history, generic_queue, history, logger, providers, ui +from .. import app, common, failed_history, generic_queue, history, providers, ui from ..helpers import pretty_file_size from ..search.core import ( search_for_needed_episodes, @@ -42,6 +43,8 @@ FORCED_SEARCH_HISTORY = [] FORCED_SEARCH_HISTORY_SIZE = 100 +lagger = logging.getLogger(__name__) + class SearchQueue(generic_queue.GenericQueue): def __init__(self): @@ -97,7 +100,7 @@ def add_item(self, item): and not self.is_in_queue(item.show, item.segment): generic_queue.GenericQueue.add_item(self, item) else: - logger.log("Not adding item, it's already in the queue", logger.DEBUG) + lagger.debug("Not adding item, it's already in the queue") def force_daily(self): if not self.is_dailysearch_in_progress and not self.currentItem.amActive: @@ -185,7 +188,7 @@ def add_item(self, item): # manual, snatch and failed searches generic_queue.GenericQueue.add_item(self, item) else: - logger.log("Not adding item, it's already in the queue", logger.DEBUG) + lagger.debug("Not adding item, it's already in the queue") class SnatchQueue(generic_queue.GenericQueue): @@ -232,7 +235,7 @@ def add_item(self, item): # backlog searches generic_queue.GenericQueue.add_item(self, item) else: - logger.log("Not adding item, it's already in the queue", logger.DEBUG) + lagger.debug("Not adding item, it's already in the queue") class DailySearchQueueItem(generic_queue.QueueItem): @@ -243,28 +246,27 @@ def __init__(self): self.started = None def run(self): - """ - Run daily search thread - """ + """Run daily search thread.""" generic_queue.QueueItem.run(self) self.started = True try: - logger.log("Beginning daily search for new episodes") + lagger.info('Beginning daily search for new episodes') found_results = search_for_needed_episodes() if not found_results: - logger.log("No needed episodes found") + lagger.info('No needed episodes found') else: for result in found_results: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log("Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format - (result.name, result.seeders, result.leechers, pretty_file_size(result.size), - result.provider.name)) + lagger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' + 'and size {size} from {provider}', + name=result.name, seeders=result.seeders, leechers=result.leechers, + size=pretty_file_size(result.size), provider=result.provider.name) else: - logger.log("Downloading {0} with size: {1} from {2}".format - (result.name, pretty_file_size(result.size), result.provider.name)) + lagger.info('Downloading {name} with size: {size} from {provider}', + name=result.name, 
size=pretty_file_size(result.size), provider=result.provider.name) self.success = snatch_episode(result) # give the CPU a break @@ -272,7 +274,7 @@ def run(self): except Exception: self.success = False - logger.log(traceback.format_exc(), logger.DEBUG) + lagger.debug(traceback.format_exc()) if self.success is None: self.success = False @@ -296,7 +298,8 @@ def __init__(self, show, segment, down_cur_quality=False, manual_search=False, m self.priority = generic_queue.QueuePriorities.HIGH # SEARCHQUEUE-MANUAL-12345 # SEARCHQUEUE-FORCED-12345 - self.name = '{0}-{1}'.format(('FORCED', 'MANUAL')[bool(manual_search)], show.indexerid) + self.name = '{search_type}-{indexerid}'\ + .format(search_type=('FORCED', 'MANUAL')[bool(manual_search)], indexerid=show.indexerid) self.success = None self.started = None @@ -314,10 +317,10 @@ def run(self): self.started = True try: - logger.log('Beginning {0} {1}search for: [{2}]'. - format(('forced', 'manual')[bool(self.manual_search)], - ('', 'season pack ')[bool(self.manual_search_type == 'season')], - self.segment[0].pretty_name())) + lagger.info('Beginning {search_type} {season_pack}search for: [{ep}]', + search_type=('forced', 'manual')[bool(self.manual_search)], + season_pack=('', 'season pack ')[bool(self.manual_search_type == 'season')], + ep=self.segment[0].pretty_name()) search_result = search_providers(self.show, self.segment, True, self.down_cur_quality, self.manual_search, self.manual_search_type) @@ -326,13 +329,14 @@ def run(self): for result in search_result: # Just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log('Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}'.format - (result.name, result.seeders, result.leechers, - pretty_file_size(result.size), result.provider.name)) + lagger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' + 'and size {size} from {provider}', + name=result.name, seeders=result.seeders, leechers=result.leechers, + size=pretty_file_size(result.size), provider=result.provider.name) else: - logger.log('Downloading {0} with size: {1} from {2}'.format - (result.name, pretty_file_size(result.size), - result.provider.name)) + lagger.info('Downloading {name} with size: {size} from {provider}', + name=result.name, size=pretty_file_size(result.size), + provider=result.provider.name) self.success = snatch_episode(result) # Give the CPU a break @@ -343,22 +347,25 @@ def run(self): self.success = True if self.manual_search_type == 'season': - ui.notifications.message('We have found season packs for {0}'.format(self.show.name), + ui.notifications.message('We have found season packs for {show_name}' + .format(show_name=self.show.name), 'These should become visible in the manual select page.') else: - ui.notifications.message('We have found results for {0}'.format(self.segment[0].pretty_name()), + ui.notifications.message('We have found results for {ep}' + .format(ep=self.segment[0].pretty_name()), 'These should become visible in the manual select page.') else: ui.notifications.message('No results were found') - logger.log('Unable to find {0} {1}results for: [{2}]'. 
- format(('forced', 'manual')[bool(self.manual_search)], - ('', 'season pack ')[bool(self.manual_search_type == 'season')], - self.segment[0].pretty_name())) + lagger.info('Unable to find {search_type} {season_pack}results for: [{ep}]', + search_type=('forced', 'manual')[bool(self.manual_search)], + season_pack=('', 'season pack ')[bool(self.manual_search_type == 'season')], + ep=self.segment[0].pretty_name()) + # TODO: Remove catch all exception. except Exception: self.success = False - logger.log(traceback.format_exc(), logger.DEBUG) + lagger.debug(traceback.format_exc()) # Keep a list with the 100 last executed searches fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE) @@ -415,28 +422,30 @@ def run(self): search_result.manually_searched = True try: - logger.log("Beginning to manual snatch release: {0}".format(search_result.name)) + lagger.info('Beginning to manual snatch release: {name}', name=search_result.name) if search_result: if search_result.seeders not in (-1, None) and search_result.leechers not in (-1, None): - logger.log("Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format - (search_result.name, search_result.seeders, search_result.leechers, - pretty_file_size(search_result.size), search_result.provider.name)) + lagger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' + 'and size {size} from {provider}', + name=search_result.name, seeders=search_result.seeders, leechers=search_result.leechers, + size=pretty_file_size(search_result.size), provider=search_result.provider.name) else: - logger.log("Downloading {0} with size: {1} from {2}".format - (search_result.name, pretty_file_size(search_result.size), search_result.provider.name)) + lagger.info('Downloading {name} with size: {size} from {provider}', + name=search_result.name, size=pretty_file_size(search_result.size), + provider=search_result.provider.name) self.success = snatch_episode(search_result) else: - logger.log("Unable to snatch release: {0}".format(search_result.name)) + lagger.info('Unable to snatch release: {name}', name=search_result.name) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) except Exception: self.success = False - logger.log(traceback.format_exc(), logger.DEBUG) + lagger.debug(traceback.format_exc()) ui.notifications.message('Error while snatching selected result', - "Couldn't snatch the result for {0}".format(search_result.name)) + "Couldn't snatch the result for {name}".format(name=search_result.name)) if self.success is None: self.success = False @@ -465,29 +474,33 @@ def run(self): if not self.show.paused: try: - logger.log("Beginning backlog search for: [" + self.show.name + "]") + lagger.info('Beginning backlog search for: [{show_name}]', show_name=self.show.name) search_result = search_providers(self.show, self.segment) if search_result: for result in search_result: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log("Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format - (result.name, result.seeders, result.leechers, pretty_file_size(result.size), - result.provider.name)) + lagger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' + 'and size {size} from {provider}', + name=result.name, seeders=result.seeders, leechers=result.leechers, + size=pretty_file_size(result.size), provider=result.provider.name) else: - logger.log("Downloading {0} with size: {1} from {2}".format - (result.name, 
pretty_file_size(result.size), result.provider.name)) + lagger.info('Downloading {name} with size: {size} from {provider}', + name=result.name, size=pretty_file_size(result.size), + provider=result.provider.name) self.success = snatch_episode(result) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) else: - logger.log("No needed episodes found during backlog search for: [" + self.show.name + "]") + lagger.info('No needed episodes found during backlog search for: [{show_name}]', + show_name=self.show.name) + # TODO: Remove the catch all exception. except Exception: self.success = False - logger.log(traceback.format_exc(), logger.DEBUG) + lagger.debug(traceback.format_exc()) if self.success is None: self.success = False @@ -518,7 +531,7 @@ def run(self): try: for ep_obj in self.segment: - logger.log("Marking episode as bad: [" + ep_obj.pretty_name() + "]") + lagger.info('Marking episode as bad: [{ep}}', ep=ep_obj.pretty_name()) failed_history.mark_failed(ep_obj) @@ -528,7 +541,7 @@ def run(self): history.log_failed(ep_obj, release, provider) failed_history.revert_episode(ep_obj) - logger.log("Beginning failed download search for: [" + ep_obj.pretty_name() + "]") + lagger.info('Beginning failed download search for: [{ep}]', ep=ep_obj.pretty_name()) # If it is wanted, self.down_cur_quality doesnt matter # if it isnt wanted, we need to make sure to not overwrite the existing ep that we reverted to! @@ -538,22 +551,24 @@ def run(self): for result in search_result: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - logger.log("Downloading {0} with {1} seeders and {2} leechers and size {3} from {4}".format - (result.name, result.seeders, result.leechers, pretty_file_size(result.size), - result.provider.name)) + lagger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' + 'and size {size} from {provider}', + name=result.name, seeders=result.seeders, leechers=result.leechers, + size=pretty_file_size(result.size), provider=result.provider.name) else: - logger.log("Downloading {0} with size: {1} from {2}".format - (result.name, pretty_file_size(result.size), result.provider.name)) + lagger.log('Downloading {name} with size: {size} from {provider}', + name=result.name, size=pretty_file_size(result.size), provider=result.provider.name) self.success = snatch_episode(result) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) else: - logger.log("No needed episodes found during failed search for: [" + self.show.name + "]") + lagger.info('No needed episodes found during failed search for: [{name}]', name=self.show.name) + # TODO: Replace the catch all exception with a more specific one. except Exception: self.success = False - logger.log(traceback.format_exc(), logger.DEBUG) + lagger.info(traceback.format_exc()) # ## Keep a list with the 100 last executed searches fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE) From 8f452ce73fac2104a8152a95e9cddb93c6c7644f Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sat, 18 Feb 2017 20:42:18 +0100 Subject: [PATCH 008/344] Missed rename. 
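
This finishes the logging migration from the previous two commits: each module now takes a
module-level logger from logging.getLogger(__name__) and passes the fields of its brace-style
template as keyword arguments, e.g. logger.debug('{id}: ...', id=...). A bare stdlib logger does
not interpolate keyword arguments that way, so calls like these rely on a brace-style wrapper or
adapter somewhere in the logging setup. The sketch below only illustrates that idea; it is not the
project's actual implementation, and every name in it is made up.

    import logging

    class BraceLogger(object):
        """Let callers write logger.debug('{id}: message', id=123)."""

        def __init__(self, name):
            self._logger = logging.getLogger(name)

        def _log(self, level, msg, *args, **kwargs):
            # Interpolate with str.format() only if the level is enabled.
            if self._logger.isEnabledFor(level):
                exc_info = kwargs.pop('exc_info', None)
                self._logger.log(level, msg.format(*args, **kwargs), exc_info=exc_info)

        def debug(self, msg, *args, **kwargs):
            self._log(logging.DEBUG, msg, *args, **kwargs)

        def info(self, msg, *args, **kwargs):
            self._log(logging.INFO, msg, *args, **kwargs)

        def warning(self, msg, *args, **kwargs):
            self._log(logging.WARNING, msg, *args, **kwargs)

        def error(self, msg, *args, **kwargs):
            self._log(logging.ERROR, msg, *args, **kwargs)

    logger = BraceLogger(__name__)  # illustrative stand-in, not medusa's own logger package
    logger.debug('{id}: Not adding item, it is already in the queue', id=42)

Deferring the str.format() call until after the level check keeps the hot queue paths cheap when
debug logging is switched off.
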
--- medusa/search/queue.py | 60 +++++++++++++++++++++--------------------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/medusa/search/queue.py b/medusa/search/queue.py index 07b3102520..a735d0c125 100644 --- a/medusa/search/queue.py +++ b/medusa/search/queue.py @@ -43,7 +43,7 @@ FORCED_SEARCH_HISTORY = [] FORCED_SEARCH_HISTORY_SIZE = 100 -lagger = logging.getLogger(__name__) +logger = logging.getLogger(__name__) class SearchQueue(generic_queue.GenericQueue): @@ -100,7 +100,7 @@ def add_item(self, item): and not self.is_in_queue(item.show, item.segment): generic_queue.GenericQueue.add_item(self, item) else: - lagger.debug("Not adding item, it's already in the queue") + logger.debug("Not adding item, it's already in the queue") def force_daily(self): if not self.is_dailysearch_in_progress and not self.currentItem.amActive: @@ -188,7 +188,7 @@ def add_item(self, item): # manual, snatch and failed searches generic_queue.GenericQueue.add_item(self, item) else: - lagger.debug("Not adding item, it's already in the queue") + logger.debug("Not adding item, it's already in the queue") class SnatchQueue(generic_queue.GenericQueue): @@ -235,7 +235,7 @@ def add_item(self, item): # backlog searches generic_queue.GenericQueue.add_item(self, item) else: - lagger.debug("Not adding item, it's already in the queue") + logger.debug("Not adding item, it's already in the queue") class DailySearchQueueItem(generic_queue.QueueItem): @@ -251,21 +251,21 @@ def run(self): self.started = True try: - lagger.info('Beginning daily search for new episodes') + logger.info('Beginning daily search for new episodes') found_results = search_for_needed_episodes() if not found_results: - lagger.info('No needed episodes found') + logger.info('No needed episodes found') else: for result in found_results: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - lagger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' + logger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' 'and size {size} from {provider}', name=result.name, seeders=result.seeders, leechers=result.leechers, size=pretty_file_size(result.size), provider=result.provider.name) else: - lagger.info('Downloading {name} with size: {size} from {provider}', + logger.info('Downloading {name} with size: {size} from {provider}', name=result.name, size=pretty_file_size(result.size), provider=result.provider.name) self.success = snatch_episode(result) @@ -274,7 +274,7 @@ def run(self): except Exception: self.success = False - lagger.debug(traceback.format_exc()) + logger.debug(traceback.format_exc()) if self.success is None: self.success = False @@ -317,7 +317,7 @@ def run(self): self.started = True try: - lagger.info('Beginning {search_type} {season_pack}search for: [{ep}]', + logger.info('Beginning {search_type} {season_pack}search for: [{ep}]', search_type=('forced', 'manual')[bool(self.manual_search)], season_pack=('', 'season pack ')[bool(self.manual_search_type == 'season')], ep=self.segment[0].pretty_name()) @@ -329,12 +329,12 @@ def run(self): for result in search_result: # Just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - lagger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' + logger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' 'and size {size} from {provider}', name=result.name, seeders=result.seeders, leechers=result.leechers, 
size=pretty_file_size(result.size), provider=result.provider.name) else: - lagger.info('Downloading {name} with size: {size} from {provider}', + logger.info('Downloading {name} with size: {size} from {provider}', name=result.name, size=pretty_file_size(result.size), provider=result.provider.name) self.success = snatch_episode(result) @@ -357,7 +357,7 @@ def run(self): else: ui.notifications.message('No results were found') - lagger.info('Unable to find {search_type} {season_pack}results for: [{ep}]', + logger.info('Unable to find {search_type} {season_pack}results for: [{ep}]', search_type=('forced', 'manual')[bool(self.manual_search)], season_pack=('', 'season pack ')[bool(self.manual_search_type == 'season')], ep=self.segment[0].pretty_name()) @@ -365,7 +365,7 @@ def run(self): # TODO: Remove catch all exception. except Exception: self.success = False - lagger.debug(traceback.format_exc()) + logger.debug(traceback.format_exc()) # Keep a list with the 100 last executed searches fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE) @@ -422,28 +422,28 @@ def run(self): search_result.manually_searched = True try: - lagger.info('Beginning to manual snatch release: {name}', name=search_result.name) + logger.info('Beginning to manual snatch release: {name}', name=search_result.name) if search_result: if search_result.seeders not in (-1, None) and search_result.leechers not in (-1, None): - lagger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' + logger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' 'and size {size} from {provider}', name=search_result.name, seeders=search_result.seeders, leechers=search_result.leechers, size=pretty_file_size(search_result.size), provider=search_result.provider.name) else: - lagger.info('Downloading {name} with size: {size} from {provider}', + logger.info('Downloading {name} with size: {size} from {provider}', name=search_result.name, size=pretty_file_size(search_result.size), provider=search_result.provider.name) self.success = snatch_episode(search_result) else: - lagger.info('Unable to snatch release: {name}', name=search_result.name) + logger.info('Unable to snatch release: {name}', name=search_result.name) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) except Exception: self.success = False - lagger.debug(traceback.format_exc()) + logger.debug(traceback.format_exc()) ui.notifications.message('Error while snatching selected result', "Couldn't snatch the result for {name}".format(name=search_result.name)) @@ -474,19 +474,19 @@ def run(self): if not self.show.paused: try: - lagger.info('Beginning backlog search for: [{show_name}]', show_name=self.show.name) + logger.info('Beginning backlog search for: [{show_name}]', show_name=self.show.name) search_result = search_providers(self.show, self.segment) if search_result: for result in search_result: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - lagger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' + logger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' 'and size {size} from {provider}', name=result.name, seeders=result.seeders, leechers=result.leechers, size=pretty_file_size(result.size), provider=result.provider.name) else: - lagger.info('Downloading {name} with size: {size} from {provider}', + logger.info('Downloading {name} with size: {size} from {provider}', name=result.name, 
size=pretty_file_size(result.size), provider=result.provider.name) self.success = snatch_episode(result) @@ -494,13 +494,13 @@ def run(self): # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) else: - lagger.info('No needed episodes found during backlog search for: [{show_name}]', + logger.info('No needed episodes found during backlog search for: [{show_name}]', show_name=self.show.name) # TODO: Remove the catch all exception. except Exception: self.success = False - lagger.debug(traceback.format_exc()) + logger.debug(traceback.format_exc()) if self.success is None: self.success = False @@ -531,7 +531,7 @@ def run(self): try: for ep_obj in self.segment: - lagger.info('Marking episode as bad: [{ep}}', ep=ep_obj.pretty_name()) + logger.info('Marking episode as bad: [{ep}}', ep=ep_obj.pretty_name()) failed_history.mark_failed(ep_obj) @@ -541,7 +541,7 @@ def run(self): history.log_failed(ep_obj, release, provider) failed_history.revert_episode(ep_obj) - lagger.info('Beginning failed download search for: [{ep}]', ep=ep_obj.pretty_name()) + logger.info('Beginning failed download search for: [{ep}]', ep=ep_obj.pretty_name()) # If it is wanted, self.down_cur_quality doesnt matter # if it isnt wanted, we need to make sure to not overwrite the existing ep that we reverted to! @@ -551,24 +551,24 @@ def run(self): for result in search_result: # just use the first result for now if result.seeders not in (-1, None) and result.leechers not in (-1, None): - lagger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' + logger.info('Downloading {name} with {seeders} seeders and {leechers} leechers ' 'and size {size} from {provider}', name=result.name, seeders=result.seeders, leechers=result.leechers, size=pretty_file_size(result.size), provider=result.provider.name) else: - lagger.log('Downloading {name} with size: {size} from {provider}', + logger.log('Downloading {name} with size: {size} from {provider}', name=result.name, size=pretty_file_size(result.size), provider=result.provider.name) self.success = snatch_episode(result) # give the CPU a break time.sleep(common.cpu_presets[app.CPU_PRESET]) else: - lagger.info('No needed episodes found during failed search for: [{name}]', name=self.show.name) + logger.info('No needed episodes found during failed search for: [{name}]', name=self.show.name) # TODO: Replace the catch all exception with a more specific one. except Exception: self.success = False - lagger.info(traceback.format_exc()) + logger.info(traceback.format_exc()) # ## Keep a list with the 100 last executed searches fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE) From 6be97d1581c25f878bd0b43afcdb5da32eb817af Mon Sep 17 00:00:00 2001 From: Thraxis Date: Sun, 19 Feb 2017 14:39:42 -0800 Subject: [PATCH 009/344] will add lazy loading of images + adds the jquery.unveil lazy load plugin + replaces asset calls on pages that have large numbers of images. 
--- static/js/add-shows/init.js | 1 + static/js/home/index.js | 4 +++ static/js/lib/jquery.unveil.js | 56 +++++++++++++++++++++++++++++++++ views/addShows_recommended.mako | 2 +- views/layouts/main.mako | 1 + views/partials/home/banner.mako | 2 +- views/trendingShows.mako | 2 +- 7 files changed, 65 insertions(+), 3 deletions(-) create mode 100644 static/js/lib/jquery.unveil.js diff --git a/static/js/add-shows/init.js b/static/js/add-shows/init.js index a0d47b80fc..c84f284372 100644 --- a/static/js/add-shows/init.js +++ b/static/js/add-shows/init.js @@ -6,6 +6,7 @@ MEDUSA.addShows.init = function() { $.initRemoteShowGrid = function() { // Set defaults on page load + $("img").unveil(200); $('#showsort').val('original'); $('#showsortdirection').val('asc'); diff --git a/static/js/home/index.js b/static/js/home/index.js index 066833ad1d..85d6d131e2 100644 --- a/static/js/home/index.js +++ b/static/js/home/index.js @@ -14,6 +14,10 @@ MEDUSA.home.index = function() { }); }, 500)); + $(function() { + $("img").unveil(200); + }); + function resizePosters(newSize) { var fontSize; var logoWidth; diff --git a/static/js/lib/jquery.unveil.js b/static/js/lib/jquery.unveil.js new file mode 100644 index 0000000000..7eb41e388f --- /dev/null +++ b/static/js/lib/jquery.unveil.js @@ -0,0 +1,56 @@ +/** + * jQuery Unveil + * A very lightweight jQuery plugin to lazy load images + * http://luis-almeida.github.com/unveil + * + * Licensed under the MIT license. + * Copyright 2013 Lus Almeida + * https://github.com/luis-almeida + */ + +;(function($) { + + $.fn.unveil = function(threshold, callback) { + + var $w = $(window), + th = threshold || 0, + retina = window.devicePixelRatio > 1, + attrib = retina? "data-src-retina" : "data-src", + images = this, + loaded; + + this.one("unveil", function() { + var source = this.getAttribute(attrib); + source = source || this.getAttribute("data-src"); + if (source) { + this.setAttribute("src", source); + if (typeof callback === "function") callback.call(this); + } + }); + + function unveil() { + var inview = images.filter(function() { + var $e = $(this); + if ($e.is(":hidden")) return; + + var wt = $w.scrollTop(), + wb = wt + $w.height(), + et = $e.offset().top, + eb = et + $e.height(); + + return eb >= wt - th && et <= wb + th; + }); + + loaded = inview.trigger("unveil"); + images = images.not(loaded); + } + + $w.on("scroll.unveil resize.unveil lookup.unveil", unveil); + + unveil(); + + return this; + + }; + +})(window.jQuery || window.Zepto); \ No newline at end of file diff --git a/views/addShows_recommended.mako b/views/addShows_recommended.mako index d2ea0f037e..398e500efc 100644 --- a/views/addShows_recommended.mako +++ b/views/addShows_recommended.mako @@ -115,7 +115,7 @@ +
 
From 9f7464fb05b140dcd680d4674e380f8bb90b60d3 Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 21 Feb 2017 06:16:52 -0300 Subject: [PATCH 018/344] Choose show's root at /home (#2231) * Choose show's root at /home * moves header above and folder select to the left. * small fix in broken html line * another html fix (cherry picked from commit 0d9e55fb82d327a2cfb8bde6adb0486801b61499) * Fix not selecting saved root folder --- medusa/__main__.py | 2 ++ medusa/app.py | 1 + medusa/server/api/v2/config.py | 5 +++++ medusa/server/web/home/handler.py | 16 ++++++++++++++-- static/js/home/index.js | 16 +++++++++++++++- views/home.mako | 29 +++++++++++++++++++++++++---- 6 files changed, 62 insertions(+), 7 deletions(-) diff --git a/medusa/__main__.py b/medusa/__main__.py index fa8274f1df..475388a3cc 100755 --- a/medusa/__main__.py +++ b/medusa/__main__.py @@ -911,6 +911,7 @@ def initialize(self, console_logging=True): app.RELEASES_IN_PP = [] app.GIT_REMOTE_BRANCHES = [] app.KODI_LIBRARY_CLEAN_PENDING = False + app.SHOWS_ROOT = check_setting_int(app.CFG, 'GUI', 'shows_root', -1) # reconfigure the logger app_logger.reconfigure() @@ -1466,6 +1467,7 @@ def save_config(): new_config['General']['display_all_seasons'] = int(app.DISPLAY_ALL_SEASONS) new_config['General']['news_last_read'] = app.NEWS_LAST_READ new_config['General']['broken_providers'] = helpers.get_broken_providers() or app.BROKEN_PROVIDERS + new_config['General']['shows_root'] = int(app.SHOWS_ROOT) new_config['Blackhole'] = {} new_config['Blackhole']['nzb_dir'] = app.NZB_DIR diff --git a/medusa/app.py b/medusa/app.py index d968edec5e..33fc5d1b0b 100644 --- a/medusa/app.py +++ b/medusa/app.py @@ -511,6 +511,7 @@ POSTER_SORTDIR = None FANART_BACKGROUND = None FANART_BACKGROUND_OPACITY = None +SHOWS_ROOT = None USE_SUBTITLES = False SUBTITLES_LANGUAGES = [] diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index 4348ed58c2..79b59a837b 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -240,6 +240,11 @@ def patch(self, *args, **kwargs): # if 'host' in data['torrents']: # if 'rpcurl' in data['torrents']: # if 'authType' in data['torrents']: + if key == 'showsRoot': + root_id = int(data['showsRoot']['id']) + app.SHOWS_ROOT = root_id + done_data.setdefault('showsRoot', {}) + done_data['showsRoot'].setdefault('id', root_id) if key == 'layout': done_data.setdefault('layout', {}) if 'schedule' in data['layout']: diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index 27696377ad..936238c664 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -58,17 +58,29 @@ def _genericMessage(self, subject, message): def index(self): t = PageTemplate(rh=self, filename='home.mako') + shows_root = int(app.SHOWS_ROOT) + if shows_root is not None and app.ROOT_DIRS: + backend_pieces = app.ROOT_DIRS.split('|') + backend_dirs = backend_pieces[1:] + shows_dir = backend_dirs[shows_root] if shows_root != -1 else None + + shows = [] if app.ANIME_SPLIT_HOME: - shows = [] anime = [] for show in app.showList: + if shows_dir and not show._location.startswith(shows_dir): + continue if show.is_anime: anime.append(show) else: shows.append(show) show_lists = [['Shows', shows], ['Anime', anime]] else: - show_lists = [['Shows', app.showList]] + for show in app.showList: + if shows_dir and not show._location.startswith(shows_dir): + continue + shows.append(show) + show_lists = [['Shows', shows]] stats = self.show_statistics() return t.render(title='Home', 
header='Show List', topmenu='home', show_lists=show_lists, show_stat=stats[0], max_download_count=stats[1], controller='home', action='index') diff --git a/static/js/home/index.js b/static/js/home/index.js index 066833ad1d..4153bc3054 100644 --- a/static/js/home/index.js +++ b/static/js/home/index.js @@ -93,7 +93,7 @@ MEDUSA.home.index = function() { }); $('#showListTableShows:has(tbody tr), #showListTableAnime:has(tbody tr)').tablesorter({ - debug: true, + debug: false, sortList: [[7, 1], [2, 0]], textExtraction: (function() { return { @@ -320,4 +320,18 @@ MEDUSA.home.index = function() { log.info(error); }); }); + + $('#showRootDir').on('change', function(){ + api.patch('config', { + showsRoot: { + id: $(this).val() + } + }).then(function(response) { + log.info(response); + window.location.reload(); + }).catch(function (error) { + log.info(error); + }); + }); + }; diff --git a/views/home.mako b/views/home.mako index cd2988a181..579e343b64 100644 --- a/views/home.mako +++ b/views/home.mako @@ -7,6 +7,14 @@ from medusa.helper.common import pretty_file_size from random import choice import re + + if app.ROOT_DIRS: + backend_pieces = app.ROOT_DIRS.split('|') + backend_default = 'rd-' + backend_pieces[0] + backend_dirs = backend_pieces[1:] + else: + backend_default = '' + backend_dirs = [] %> <%block name="metas"> @@ -21,16 +29,14 @@
Direction: -
Sort By: - @@ -53,7 +59,22 @@ % else:

${title}

% endif +
+ +
+ +
+
+
+ +
% if app.HOME_LAYOUT != 'poster': From 54bddc5a7ca8e8277eae3dea2fb8c41c0867f658 Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 21 Feb 2017 06:17:09 -0300 Subject: [PATCH 019/344] Auto refresh page after change theme (#2232) * Auto refresh page after select theme * Remove text to refresh browser --- medusa/server/api/v2/config.py | 4 ++++ static/js/config/index.js | 14 ++++++++++++++ views/config_general.mako | 1 - 3 files changed, 18 insertions(+), 1 deletion(-) diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index 79b59a837b..fbd5e4d869 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -275,6 +275,10 @@ def patch(self, *args, **kwargs): if 'specials' in data['layout']['show'] and str(data['layout']['show']['specials']).lower() in ['true', 'false']: app.DISPLAY_SHOW_SPECIALS = int(data['layout']['show']['specials']) done_data['layout']['show'].setdefault('specials', bool(app.DISPLAY_SHOW_SPECIALS)) + if key == 'theme': + theme_name = data['theme']['name'] + app.THEME_NAME = theme_name + done_data['themeName'] = theme_name # Make sure to update the config file after everything is updated app.instance.save_config() if len(done_errors): diff --git a/static/js/config/index.js b/static/js/config/index.js index 71ed48ac04..a8eaf297fe 100644 --- a/static/js/config/index.js +++ b/static/js/config/index.js @@ -4,6 +4,20 @@ MEDUSA.config.index = function() { $('label[for="proxy_indexers"]').hide(); } + + $('#theme_name').on('change', function(){ + api.patch('config', { + theme: { + name: $(this).val() + } + }).then(function(response) { + log.info(response); + window.location.reload(); + }).catch(function (error) { + log.info(error); + }); + }); + $('input[name="proxy_setting"]').on('input', function() { if ($(this).val().length === 0) { $('input[id="proxy_indexers"]').prop('checked', false); diff --git a/views/config_general.mako b/views/config_general.mako index f98d0a7360..5065bb0480 100644 --- a/views/config_general.mako +++ b/views/config_general.mako @@ -220,7 +220,6 @@ - for appearance to take effect, save then refresh your browser
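Both of the preceding patches follow the same client-side pattern: send a partial configuration object to the v2 config endpoint with api.patch, then reload the page so the server-rendered views pick up the new value. The sketch below only restates the payload shapes handled by the new showsRoot and theme branches in medusa/server/api/v2/config.py; the api and log globals are the ones already used elsewhere in these scripts, and the element IDs are the ones used in the diffs.

    // Home page: filter the show list by root folder (-1 means all root dirs).
    api.patch('config', {
        showsRoot: {
            id: $('#showRootDir').val()   // index into the folders stored in app.ROOT_DIRS
        }
    }).then(function(response) {
        log.info(response);
        window.location.reload();
    }).catch(function(err) {
        log.info(err);
    });

    // General config page: switch the theme and refresh immediately,
    // which is why the old "save then refresh your browser" hint is removed.
    api.patch('config', {
        theme: {
            name: $('#theme_name').val()  // stored as app.THEME_NAME on the backend
        }
    }).then(function() {
        window.location.reload();
    });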
From d76c856c13dd03ccccd8a1a74c24f91250e2d613 Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 21 Feb 2017 14:49:01 -0300 Subject: [PATCH 020/344] Final flake for medusa/search (#2014) * Final flake for medusa/search * Unused variable 'old_ep_status' --- medusa/post_processor.py | 2 +- medusa/search/__init__.py | 1 + medusa/search/backlog.py | 42 +++++++++++------- medusa/search/daily.py | 6 +-- medusa/search/manual.py | 29 ++++++------- medusa/search/queue.py | 90 +++++++++++++++++++++++++-------------- pytest.ini | 5 --- 7 files changed, 101 insertions(+), 74 deletions(-) diff --git a/medusa/post_processor.py b/medusa/post_processor.py index 6687dbe965..7559078618 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ -976,7 +976,7 @@ def process(self): # retrieve/create the corresponding Episode objects ep_obj = self._get_ep_obj(show, season, episodes) - old_ep_status, old_ep_quality = common.Quality.split_composite_status(ep_obj.status) + _, old_ep_quality = common.Quality.split_composite_status(ep_obj.status) # get the quality of the episode we're processing if quality and common.Quality.qualityStrings[quality] != 'Unknown': diff --git a/medusa/search/__init__.py b/medusa/search/__init__.py index e69de29bb2..61b45a0cbc 100644 --- a/medusa/search/__init__.py +++ b/medusa/search/__init__.py @@ -0,0 +1 @@ +"""Search module for all Medusa searches.""" diff --git a/medusa/search/backlog.py b/medusa/search/backlog.py index f15c23f016..3fd3455a43 100644 --- a/medusa/search/backlog.py +++ b/medusa/search/backlog.py @@ -15,12 +15,13 @@ # # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . - +"""Backlog module.""" import datetime import threading from six import iteritems + from .queue import BacklogQueueItem from .. 
import app, common, db, logger, scheduler, ui @@ -29,11 +30,15 @@ class BacklogSearchScheduler(scheduler.Scheduler): - def forceSearch(self): + """Backlog search scheduler class.""" + + def force_search(self): + """Set the last backlog in the DB.""" self.action._set_last_backlog(1) self.lastRun = datetime.datetime.fromordinal(1) def next_run(self): + """Return when backlog should run next.""" if self.action._last_backlog <= 1: return datetime.date.today() else: @@ -41,8 +46,10 @@ def next_run(self): class BacklogSearcher(object): - def __init__(self): + """Backlog Searcher class.""" + def __init__(self): + """Initialize the class.""" self._last_backlog = self._get_last_backlog() self.cycleTime = app.BACKLOG_FREQUENCY / 60.0 / 24 self.lock = threading.Lock() @@ -52,24 +59,27 @@ def __init__(self): self.forced = False self.currentSearchInfo = {} - self._resetPI() + self._reset_pi() - def _resetPI(self): + def _reset_pi(self): + """Reset percent done.""" self.percentDone = 0 self.currentSearchInfo = {'title': 'Initializing'} def get_progress_indicator(self): + """Get backlog search progress indicator.""" if self.amActive: return ui.ProgressIndicator(self.percentDone, self.currentSearchInfo) else: return None def am_running(self): + """Check if backlog is running.""" logger.log(u"amWaiting: " + str(self.amWaiting) + ", amActive: " + str(self.amActive), logger.DEBUG) return (not self.amWaiting) and self.amActive def search_backlog(self, which_shows=None): - + """Run the backlog search for given shows.""" if self.amActive: logger.log(u"Backlog is still running, not starting it again", logger.DEBUG) return @@ -88,7 +98,7 @@ def search_backlog(self, which_shows=None): self._get_last_backlog() - curDate = datetime.date.today().toordinal() + cur_date = datetime.date.today().toordinal() from_date = datetime.date.fromordinal(1) if not which_shows and self.forced: @@ -117,13 +127,13 @@ def search_backlog(self, which_shows=None): # don't consider this an actual backlog search if we only did recent eps # or if we only did certain shows if from_date == datetime.date.fromordinal(1) and not which_shows: - self._set_last_backlog(curDate) + self._set_last_backlog(cur_date) self.amActive = False - self._resetPI() + self._reset_pi() def _get_last_backlog(self): - + """Get the last time backloged runned.""" logger.log(u"Retrieving the last check time from the DB", logger.DEBUG) main_db_con = db.DBConnection() @@ -141,14 +151,14 @@ def _get_last_backlog(self): self._last_backlog = last_backlog return self._last_backlog - def _get_segments(self, show, from_date): + @staticmethod + def _get_segments(show, from_date): + """Get episodes that should be backlog searched.""" wanted = {} if show.paused: logger.log(u"Skipping backlog for %s because the show is paused" % show.name, logger.DEBUG) return wanted - allowed_qualities, preferred_qualities = common.Quality.split_quality(show.quality) - logger.log(u"Seeing if we need anything from %s" % show.name, logger.DEBUG) con = db.DBConnection() @@ -175,8 +185,9 @@ def _get_segments(self, show, from_date): return wanted - def _set_last_backlog(self, when): - + @staticmethod + def _set_last_backlog(when): + """Set the last backlog in the DB.""" logger.log(u"Setting the last backlog in the DB to " + str(when), logger.DEBUG) main_db_con = db.DBConnection() @@ -188,6 +199,7 @@ def _set_last_backlog(self, when): main_db_con.action("UPDATE info SET last_backlog={0}".format(when)) def run(self, force=False): + """Run the backlog.""" try: if force: self.forced = True diff --git 
a/medusa/search/daily.py b/medusa/search/daily.py index 1af23c9e12..685b02137f 100644 --- a/medusa/search/daily.py +++ b/medusa/search/daily.py @@ -16,7 +16,7 @@ # # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . - +"""Daily searcher module.""" from __future__ import unicode_literals import threading @@ -35,12 +35,12 @@ class DailySearcher(object): # pylint:disable=too-few-public-methods """Daily search class.""" def __init__(self): + """Initialize the class.""" self.lock = threading.Lock() self.amActive = False def run(self, force=False): # pylint:disable=too-many-branches - """ - Runs the daily searcher, queuing selected episodes for search + """Run the daily searcher, queuing selected episodes for search. :param force: Force search """ diff --git a/medusa/search/manual.py b/medusa/search/manual.py index c8d3cbe363..8f8d020226 100644 --- a/medusa/search/manual.py +++ b/medusa/search/manual.py @@ -16,14 +16,16 @@ # # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . - +"""Manual search module.""" import json import threading import time from datetime import datetime + from dateutil import parser + from .queue import FORCED_SEARCH_HISTORY, ForcedSearchQueueItem from .. import app, db, logger from ..common import Overview, Quality, cpu_presets, statusStrings @@ -38,10 +40,7 @@ def get_quality_class(ep_obj): - """ - Find the quality class for the episode - """ - + """Find the quality class for the episode.""" _, ep_quality = Quality.split_composite_status(ep_obj.status) if ep_quality in Quality.cssClassStrings: quality_class = Quality.cssClassStrings[ep_quality] @@ -52,7 +51,7 @@ def get_quality_class(ep_obj): def get_episode(show, season=None, episode=None, absolute=None): - """ Get a specific episode object based on show, season and episode number + """Get a specific episode object based on show, season and episode number. :param show: Season number :param season: Season number @@ -82,8 +81,7 @@ def get_episode(show, season=None, episode=None, absolute=None): def get_episodes(search_thread, searchstatus): - """ Get all episodes located in a search thread with a specific status """ - + """Get all episodes located in a search thread with a specific status.""" results = [] # NOTE!: Show.find called with just indexerid! show_obj = Show.find(app.showList, int(search_thread.show.indexerid)) @@ -115,8 +113,8 @@ def get_episodes(search_thread, searchstatus): def update_finished_search_queue_item(snatch_queue_item): - """ - Updates the previous manual searched queue item with the correct status + """Update the previous manual searched queue item with the correct status. + @param snatch_queue_item: A successful snatch queue item, send from pickManualSearch(). @return: True if status update was successful, False if not. """ @@ -140,9 +138,9 @@ def update_finished_search_queue_item(snatch_queue_item): def collect_episodes_from_search_thread(show): - """ - Collects all episodes from from the forced_search_queue_scheduler - and looks for episodes that are in status queued or searching. + """Collect all episodes from from the forced_search_queue_scheduler. + + And looks for episodes that are in status queued or searching. If episodes are found in FORCED_SEARCH_HISTORY, these are set to status finished. 
""" episodes = [] @@ -180,10 +178,7 @@ def collect_episodes_from_search_thread(show): def get_provider_cache_results(indexer, show_all_results=None, perform_search=None, **search_show): # pylint: disable=too-many-locals,unused-argument - """ - Check all provider cache tables for search results - """ - + """Check all provider cache tables for search results.""" show = search_show.get('show') season = search_show.get('season') episode = search_show.get('episode') diff --git a/medusa/search/queue.py b/medusa/search/queue.py index a735d0c125..94060e010a 100644 --- a/medusa/search/queue.py +++ b/medusa/search/queue.py @@ -47,12 +47,16 @@ class SearchQueue(generic_queue.GenericQueue): + """Search queue class.""" + def __init__(self): + """Initialize the class.""" generic_queue.GenericQueue.__init__(self) self.queue_name = "SEARCHQUEUE" self.force = False def is_in_queue(self, show, segment): + """Check if item is in queue.""" for cur_item in self.queue: if isinstance(cur_item, (BacklogQueueItem, FailedQueueItem, ForcedSearchQueueItem, ManualSnatchQueueItem)) \ @@ -61,28 +65,34 @@ def is_in_queue(self, show, segment): return False def pause_backlog(self): + """Pause the backlog.""" self.min_priority = generic_queue.QueuePriorities.HIGH def unpause_backlog(self): + """Unpause the backlog.""" self.min_priority = 0 def is_backlog_paused(self): + """Check if backlog is paused.""" # backlog priorities are NORMAL, this should be done properly somewhere return self.min_priority >= generic_queue.QueuePriorities.NORMAL def is_backlog_in_progress(self): + """Check is backlog is in progress.""" for cur_item in self.queue + [self.currentItem]: if isinstance(cur_item, BacklogQueueItem): return True return False def is_dailysearch_in_progress(self): + """Check if daily search is in progress.""" for cur_item in self.queue + [self.currentItem]: if isinstance(cur_item, DailySearchQueueItem): return True return False def queue_length(self): + """Get queue lenght.""" length = {'backlog': 0, 'daily': 0} for cur_item in self.queue: if isinstance(cur_item, DailySearchQueueItem): @@ -92,6 +102,7 @@ def queue_length(self): return length def add_item(self, item): + """Add item to queue.""" if isinstance(item, DailySearchQueueItem): # daily searches generic_queue.GenericQueue.add_item(self, item) @@ -103,6 +114,7 @@ def add_item(self, item): logger.debug("Not adding item, it's already in the queue") def force_daily(self): + """Force daily searched.""" if not self.is_dailysearch_in_progress and not self.currentItem.amActive: self.force = True return True @@ -110,26 +122,22 @@ def force_daily(self): class ForcedSearchQueue(generic_queue.GenericQueue): - """Search Queueu used for Forced Search, Failed Search and """ + """Search Queueu used for Forced Search, Failed Search.""" + def __init__(self): """Initialize ForcedSearch Queue.""" generic_queue.GenericQueue.__init__(self) self.queue_name = "SEARCHQUEUE" def is_in_queue(self, show, segment): - """ - Verify if the show and segment (episode or number of episodes) are scheduled. - """ + """Verify if the show and segment (episode or number of episodes) are scheduled.""" for cur_item in self.queue: if cur_item.show == show and cur_item.segment == segment: return True return False def is_ep_in_queue(self, segment): - """ - Verify if the show and segment (episode or number of episodes) are scheduled in a - ForcedSearchQueueItem or FailedQueueItem. 
- """ + """Verify if the show and segment (episode or number of episodes) are scheduled.""" for cur_item in self.queue: if isinstance(cur_item, (ForcedSearchQueueItem, FailedQueueItem)) and cur_item.segment == segment: return True @@ -143,8 +151,8 @@ def is_show_in_queue(self, show): return False def get_all_ep_from_queue(self, show): - """ - Get QueueItems from the queue if the queue item is scheduled to search for the passed Show. + """Get QueueItems from the queue if the queue item is scheduled to search for the passed Show. + @param show: Show indexer_id @return: A list of ForcedSearchQueueItem or FailedQueueItem items @@ -158,20 +166,21 @@ def get_all_ep_from_queue(self, show): return ep_obj_list def is_backlog_paused(self): - """ - Verify if the ForcedSearchQueue's min_priority has been changed. This indicates that the - queue has been paused. + """Verify if the ForcedSearchQueue's min_priority has been changed. + + This indicates that the queue has been paused. # backlog priorities are NORMAL, this should be done properly somewhere """ return self.min_priority >= generic_queue.QueuePriorities.NORMAL def is_forced_search_in_progress(self): - """Tests of a forced search is currently running, it doesn't check what's in queue.""" + """Test of a forced search is currently running, it doesn't check what's in queue.""" if isinstance(self.currentItem, (ForcedSearchQueueItem, FailedQueueItem)): return True return False def queue_length(self): + """Get queue length.""" length = {'forced_search': 0, 'manual_search': 0, 'failed': 0} for cur_item in self.queue: if isinstance(cur_item, FailedQueueItem): @@ -192,14 +201,16 @@ def add_item(self, item): class SnatchQueue(generic_queue.GenericQueue): - """Queue for queuing ManualSnatchQueueItem objects (snatch jobs)""" + """Queue for queuing ManualSnatchQueueItem objects (snatch jobs).""" + def __init__(self): """Initialize the SnatchQueue object.""" generic_queue.GenericQueue.__init__(self) self.queue_name = "SNATCHQUEUE" def is_in_queue(self, show, segment): - """Check if the passed show and segment (episode of list of episodes) is in the queue + """Check if the passed show and segment (episode of list of episodes) is in the queue. + @param show: show object @param segment: list of episode objects @@ -211,7 +222,8 @@ def is_in_queue(self, show, segment): return False def is_ep_in_queue(self, segment): - """Check if the passed segment (episode of list of episodes) is in the queue + """Check if the passed segment (episode of list of episodes) is in the queue. + @param segment: list of episode objects @return: True or False @@ -222,13 +234,15 @@ def is_ep_in_queue(self, segment): return False def queue_length(self): - """Get the length of the current queue + """Get the length of the current queue. + @return: length of queue """ return {'manual_snatch': len(self.queue)} def add_item(self, item): - """Add a ManualSnatchQueueItem queue item + """Add a ManualSnatchQueueItem queue item. 
+ @param item: ManualSnatchQueueItem gueue object """ if not self.is_in_queue(item.show, item.segment): @@ -239,7 +253,10 @@ def add_item(self, item): class DailySearchQueueItem(generic_queue.QueueItem): + """Daily searche queue item class.""" + def __init__(self): + """Initialize the class.""" generic_queue.QueueItem.__init__(self, u'Daily Search', DAILY_SEARCH) self.success = None @@ -283,8 +300,11 @@ def run(self): class ForcedSearchQueueItem(generic_queue.QueueItem): + """Forced search queue item class.""" + def __init__(self, show, segment, down_cur_quality=False, manual_search=False, manual_search_type='episode'): - """A Queueitem used to queue Forced Searches and Manual Searches + """A Queueitem used to queue Forced Searches and Manual Searches. + @param show: A show object @param segment: A list of episode objects. Needs to be passed as list! @param down_cur_quality: Not sure what it's used for. Maybe legacy. @@ -379,6 +399,7 @@ def run(self): class ManualSnatchQueueItem(generic_queue.QueueItem): """ A queue item that can be used to queue the snatch of a search result. + Currently used for the snatchSelection feature. @param show: A show object @@ -388,7 +409,9 @@ class ManualSnatchQueueItem(generic_queue.QueueItem): @return: The run() methods snatches the episode(s) if possible. """ + def __init__(self, show, segment, provider, cached_result): + """Initialize the class.""" generic_queue.QueueItem.__init__(self, u'Manual Search', MANUAL_SEARCH) self.priority = generic_queue.QueuePriorities.HIGH self.name = 'MANUALSNATCH-' + str(show.indexerid) @@ -401,9 +424,7 @@ def __init__(self, show, segment, provider, cached_result): self.cached_result = cached_result def run(self): - """ - Run manual snatch job - """ + """Run manual snatch job.""" generic_queue.QueueItem.run(self) self.started = True @@ -454,7 +475,10 @@ def run(self): class BacklogQueueItem(generic_queue.QueueItem): + """Backlog queue item class.""" + def __init__(self, show, segment): + """Initialize the class.""" generic_queue.QueueItem.__init__(self, u'Backlog', BACKLOG_SEARCH) self.priority = generic_queue.QueuePriorities.LOW self.name = 'BACKLOG-' + str(show.indexerid) @@ -466,9 +490,7 @@ def __init__(self, show, segment): self.segment = segment def run(self): - """ - Run backlog search thread - """ + """Run backlog search thread.""" generic_queue.QueueItem.run(self) self.started = True @@ -509,7 +531,10 @@ def run(self): class FailedQueueItem(generic_queue.QueueItem): + """Failed queue item class.""" + def __init__(self, show, segment, down_cur_quality=False): + """Initialize the class.""" generic_queue.QueueItem.__init__(self, u'Retry', FAILED_SEARCH) self.priority = generic_queue.QueuePriorities.HIGH self.name = 'RETRY-' + str(show.indexerid) @@ -522,9 +547,7 @@ def __init__(self, show, segment, down_cur_quality=False): self.down_cur_quality = down_cur_quality def run(self): - """ - Run failed thread - """ + """Run failed thread.""" generic_queue.QueueItem.run(self) self.started = True @@ -579,7 +602,8 @@ def run(self): self.finish() -def fifo(myList, item, max_size=100): - if len(myList) >= max_size: - myList.pop(0) - myList.append(item) +def fifo(my_list, item, max_size=100): + """Append item to queue and limit it to 100 items.""" + if len(my_list) >= max_size: + my_list.pop(0) + my_list.append(item) diff --git a/pytest.ini b/pytest.ini index a2bb76bdd0..17dedaf8e7 100644 --- a/pytest.ini +++ b/pytest.ini @@ -96,11 +96,6 @@ flake8-ignore = medusa/scene_exceptions.py D100 medusa/scene_numbering.py D100 D200 D205 
D400 D401 E501 N803 N806 medusa/scheduler.py D100 D101 D102 D200 D205 D400 D401 N802 N803 - medusa/search/__init__.py D104 - medusa/search/backlog.py D100 D101 D102 N802 N806 - medusa/search/daily.py D100 D102 D400 D401 - medusa/search/manual.py D100 D200 D202 D205 D210 D400 D401 - medusa/search/queue.py D100 D101 D102 D103 D200 D204 D205 D210 D400 D401 E231 N803 medusa/server/__init__.py D104 medusa/server/api/__init__.py D104 medusa/server/api/v1/__init__.py D104 From 190ec816f1da495a770130d87c5e4651663106b9 Mon Sep 17 00:00:00 2001 From: Dario Date: Tue, 21 Feb 2017 19:07:39 +0100 Subject: [PATCH 021/344] Fix air-by-date shows with same day special (#2261) --- medusa/name_parser/parser.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/medusa/name_parser/parser.py b/medusa/name_parser/parser.py index 06730fba36..964d8faa6a 100644 --- a/medusa/name_parser/parser.py +++ b/medusa/name_parser/parser.py @@ -93,6 +93,13 @@ def _parse_string(self, name): if sql_result: season_number = int(sql_result[0][0]) episode_numbers = [int(sql_result[0][1])] + + # Use the next query item if we have multiple results + # and the current one is a special episode (season 0) + if season_number == 0 and len(sql_result) > 1: + season_number = int(sql_result[1][0]) + episode_numbers = [int(sql_result[1][1])] + logger.debug('Database info for show {name}: Season: {season} Episode(s): {episodes}', name=result.show.name, season=season_number, episodes=episode_numbers) From 4d6fd5bb57ff9b71c3031b03b4a3cc509dc8536d Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 21 Feb 2017 15:15:39 -0300 Subject: [PATCH 022/344] Fix typeError: debug() got multiple values for keyword argument 'msg' (#2262) --- medusa/tv/series.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index 4721ade3be..0aeba0a52a 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -1823,13 +1823,13 @@ def want_episode(self, season, episode, quality, forced_search=False, download_c new_quality=Quality.qualityStrings[quality]) return True - should_replace, msg = Quality.should_replace(ep_status, cur_quality, quality, allowed_qualities, - preferred_qualities, download_current_quality, - forced_search, manually_searched) + should_replace, reason = Quality.should_replace(ep_status, cur_quality, quality, allowed_qualities, + preferred_qualities, download_current_quality, + forced_search, manually_searched) logger.debug(u"{id}: '{show}' {ep} status is: '{status}'. {action} result with quality '{new_quality}'. 
" - u"Reason: {msg}", id=self.indexerid, show=self.name, ep=episode_num(season, episode), + u"Reason: {reason}", id=self.indexerid, show=self.name, ep=episode_num(season, episode), status=ep_status_text, action='Accepting' if should_replace else 'Ignoring', - new_quality=Quality.qualityStrings[quality], msg=msg) + new_quality=Quality.qualityStrings[quality], reason=reason) return should_replace def get_overview(self, ep_status, backlog_mode=False, manually_searched=False): From 178c8c7c81aac7e7d376c598367f1583633957bb Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 21 Feb 2017 17:17:02 -0300 Subject: [PATCH 023/344] Fix name cache log (#2265) Internal name cache for Legion set to: [[u'show']] to Internal name cache for Legion set to: [u'legion'] --- medusa/name_cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/name_cache.py b/medusa/name_cache.py index db3118ef41..128201684c 100644 --- a/medusa/name_cache.py +++ b/medusa/name_cache.py @@ -113,7 +113,7 @@ def _cache_name(show): # Add scene exceptions to name cache name_cache.update(names) - logger.log(u'Internal name cache for {show} set to: [{names}]'.format( + logger.log(u'Internal name cache for {show} set to: {names}'.format( show=show.name, names=names.keys() ), logger.DEBUG) From 475b3ac5edbf412a7ca54b14c78e4e04041cd89e Mon Sep 17 00:00:00 2001 From: Thraxis Date: Tue, 21 Feb 2017 19:37:43 -0800 Subject: [PATCH 024/344] Remove jquery.unveil, fix double -> single quotes --- static/js/home/index.js | 4 +-- static/js/lib/jquery.unveil.js | 56 ---------------------------------- 2 files changed, 2 insertions(+), 58 deletions(-) delete mode 100644 static/js/lib/jquery.unveil.js diff --git a/static/js/home/index.js b/static/js/home/index.js index 095c11ddcb..c01af4dc37 100644 --- a/static/js/home/index.js +++ b/static/js/home/index.js @@ -189,10 +189,10 @@ MEDUSA.home.index = function() { sortStable: true, sortAppend: [[2, 0]] }) - .bind("sortEnd",function(e, t){ + .bind('sortEnd',function(e, t){ imgLazyLoad.handleScroll(); }) - .bind("filterEnd",function(e, t){ + .bind('filterEnd',function(e, t){ imgLazyLoad.handleScroll(); }); diff --git a/static/js/lib/jquery.unveil.js b/static/js/lib/jquery.unveil.js deleted file mode 100644 index 7eb41e388f..0000000000 --- a/static/js/lib/jquery.unveil.js +++ /dev/null @@ -1,56 +0,0 @@ -/** - * jQuery Unveil - * A very lightweight jQuery plugin to lazy load images - * http://luis-almeida.github.com/unveil - * - * Licensed under the MIT license. - * Copyright 2013 Lus Almeida - * https://github.com/luis-almeida - */ - -;(function($) { - - $.fn.unveil = function(threshold, callback) { - - var $w = $(window), - th = threshold || 0, - retina = window.devicePixelRatio > 1, - attrib = retina? 
"data-src-retina" : "data-src", - images = this, - loaded; - - this.one("unveil", function() { - var source = this.getAttribute(attrib); - source = source || this.getAttribute("data-src"); - if (source) { - this.setAttribute("src", source); - if (typeof callback === "function") callback.call(this); - } - }); - - function unveil() { - var inview = images.filter(function() { - var $e = $(this); - if ($e.is(":hidden")) return; - - var wt = $w.scrollTop(), - wb = wt + $w.height(), - et = $e.offset().top, - eb = et + $e.height(); - - return eb >= wt - th && et <= wb + th; - }); - - loaded = inview.trigger("unveil"); - images = images.not(loaded); - } - - $w.on("scroll.unveil resize.unveil lookup.unveil", unveil); - - unveil(); - - return this; - - }; - -})(window.jQuery || window.Zepto); \ No newline at end of file From 43a88d0d8f4086977433ef0cd439d402b96f18d1 Mon Sep 17 00:00:00 2001 From: Thraxis Date: Tue, 21 Feb 2017 21:01:53 -0800 Subject: [PATCH 025/344] will fix several javascript lint errors --- static/js/common/init.js | 79 +++++++++++++++--------------- static/js/config-providers.js | 2 +- static/js/core.js | 19 ++++--- static/js/history/index.js | 6 +-- static/js/home/display-show.js | 28 +++++------ static/js/home/index.js | 15 +++--- static/js/home/snatch-selection.js | 20 ++++---- static/js/quality-chooser.js | 60 +++++++++++------------ static/js/schedule/index.js | 6 +-- 9 files changed, 117 insertions(+), 118 deletions(-) diff --git a/static/js/common/init.js b/static/js/common/init.js index e7d7f9707e..d5666a75bd 100644 --- a/static/js/common/init.js +++ b/static/js/common/init.js @@ -5,14 +5,14 @@ MEDUSA.common.init = function() { let asset = 'show/' + $('#showID').attr('value') + '?type=fanart'; let path = apiRoot + 'asset/' + asset + '&api_key=' + apiKey; $.backstretch(path); - $('.backstretch').css('top',backstretchOffset()); + $('.backstretch').css('top', backstretchOffset()); $('.backstretch').css('opacity', MEDUSA.config.fanartBackgroundOpacity).fadeIn(500); } } function backstretchOffset() { var offset = '90px'; - if($("#sub-menu-container").length == 0) { + if ($('#sub-menu-container').length === 0) { offset = '50px'; } if ($(window).width() < 1281) { @@ -22,7 +22,7 @@ MEDUSA.common.init = function() { } $(window).resize(function() { - $('.backstretch').css('top',backstretchOffset()); + $('.backstretch').css('top', backstretchOffset()); }); $.confirm.options = { @@ -182,7 +182,7 @@ MEDUSA.common.init = function() { }, position: { my: my, - at: at, + at: at }, style: { tip: { @@ -195,42 +195,43 @@ MEDUSA.common.init = function() { }); }; - // function to change luminance of #000000 color - used in triggerhighlighting - function ColorLuminance(hex, lum) { - hex = String(hex).replace(/[^0-9a-f]/gi, ''); - if (hex.length < 6) { - hex = hex[0]+hex[0]+hex[1]+hex[1]+hex[2]+hex[2]; - } - lum = lum || 0; - var rgb = "#", c, i; - for (i = 0; i < 3; i++) { - c = parseInt(hex.substr(i*2,2), 16); - c = Math.round(Math.min(Math.max(0, c + (c * lum)), 255)).toString(16); - rgb += ("00"+c).substr(c.length); - } - return rgb; +// function to change luminance of #000000 color - used in triggerhighlighting +function colorLuminance(hex, lum) { + hex = String(hex).replace(/[^0-9a-f]/gi, ''); + if (hex.length < 6) { + hex = hex[0] + hex[0] + hex[1] + hex[1] + hex[2] + hex[2]; } + lum = lum || 0; + var rgb = '#'; + var c; + var i; + for (i = 0; i < 3; i++) { + c = parseInt(hex.substr(i * 2, 2), 16); + c = Math.round(Math.min(Math.max(0, c + (c * lum)), 255)).toString(16); + rgb += ('00' + 
c).substr(c.length); + } + return rgb; +} // function to convert rgb(0,0,0) into #000000 - function rgb2hex(rgb) { - rgb = rgb.match(/^rgb\((\d+),\s*(\d+),\s*(\d+)\)$/); - function hex(x) { - return ("0" + parseInt(x).toString(16)).slice(-2); - } - return "#" + hex(rgb[1]) + hex(rgb[2]) + hex(rgb[3]); +function rgb2hex(rgb) { + rgb = rgb.match(/^rgb\((\d+),\s*(\d+),\s*(\d+)\)$/); + function hex(x) { + return ('0' + parseInt(x).toString(16)).slice(-2); } - - var revert_background_color; // used to revert back to original background-color after highlight - var allCells = $(".triggerhighlight"); - allCells - .on("mouseover", function() { - var el = $(this), - pos = el.index(); - revert_background_color = rgb2hex($(this).parent().css("background-color")); // fetch the original background-color to revert back to - var highlight_background_color = ColorLuminance(revert_background_color, -0.15); // change highlight color based on original color - el.parent().find(".triggerhighlight").css("background-color", highlight_background_color); // setting highlight background-color - }) - .on("mouseout", function() { - $(this).parent().find(".triggerhighlight").css("background-color", revert_background_color); // reverting back to original background-color - }); - + return '#' + hex(rgb[1]) + hex(rgb[2]) + hex(rgb[3]); +} + +var revertBackgroundColor; // used to revert back to original background-color after highlight +var allCells = $('.triggerhighlight'); +allCells +.on('mouseover', function() { + var el = $(this); + var pos = el.index(); + var revertBackgroundColor = rgb2hex($(this).parent().css('background-color')); // fetch the original background-color to revert back to + var highlightBackgroundColor = colorLuminance(revertBackgroundColor, -0.15); // change highlight color based on original color + el.parent().find('.triggerhighlight').css('background-color', highlightBackgroundColor); // setting highlight background-color +}) +.on('mouseout', function() { + $(this).parent().find('.triggerhighlight').css('background-color', revertBackgroundColor); // reverting back to original background-color +}); diff --git a/static/js/config-providers.js b/static/js/config-providers.js index 1bfb5f8f6c..8baa6074be 100644 --- a/static/js/config-providers.js +++ b/static/js/config-providers.js @@ -73,7 +73,7 @@ $(document).ready(function() { // eslint-disable-line max-lines newznabProviders[id] = newData; $('#editANewznabProvider').addOption(id, name); - $("select#editANewznabProvider").prop("selectedIndex", 0) + $('select#editANewznabProvider').prop('selectedIndex', 0); if ($('#provider_order_list > #' + id).length === 0 && showProvider !== false) { var toAdd = '
  • ' + name + ' ' + name + '
  • '; // eslint-disable-line no-undef diff --git a/static/js/core.js b/static/js/core.js index 0793f3eba0..1f62c7c66a 100644 --- a/static/js/core.js +++ b/static/js/core.js @@ -46,16 +46,15 @@ $.extend({ isMeta: function(pyVar, result) { // eslint-disable-line no-unused-vars var reg = new RegExp(result.length > 1 ? result.join('|') : result); - if (typeof(pyVar) === 'object' && Object.keys(pyVar).length == 1) { + if (typeof (pyVar) === 'object' && Object.keys(pyVar).length === 1) { return (reg).test(MEDUSA.config[Object.keys(pyVar)[0]][pyVar[Object.keys(pyVar)[0]]]); - } else { - if (pyVar.match('medusa')) { - pyVar.split('.')[1].toLowerCase().replace(/(_\w)/g, function(m) { - return m[1].toUpperCase(); - }); - } - return (reg).test(MEDUSA.config[pyVar]); } + if (pyVar.match('medusa')) { + pyVar.split('.')[1].toLowerCase().replace(/(_\w)/g, function(m) { + return m[1].toUpperCase(); + }); + } + return (reg).test(MEDUSA.config[pyVar]); } }); @@ -75,7 +74,7 @@ if (!document.location.pathname.endsWith('/login/')) { MEDUSA.config.themeSpinner = MEDUSA.config.themeName === 'dark' ? '-dark' : ''; MEDUSA.config.loading = ''; - $('[asset]').each(function(){ + $('[asset]').each(function() { let asset = $(this).attr('asset'); let path = apiRoot + 'asset/' + asset + '&api_key=' + apiKey; if (this.tagName.toLowerCase() === 'img') { @@ -89,7 +88,7 @@ if (!document.location.pathname.endsWith('/login/')) { if (navigator.userAgent.indexOf('PhantomJS') === -1) { $(document).ready(UTIL.init); } - }).catch(function (error) { + }).catch(function(err) { alert('Unable to connect to Medusa!'); // eslint-disable-line no-alert }); } diff --git a/static/js/history/index.js b/static/js/history/index.js index 6d35d241b7..cd2b0bd52d 100644 --- a/static/js/history/index.js +++ b/static/js/history/index.js @@ -35,7 +35,7 @@ MEDUSA.history.index = function() { window.location.href = $('base').attr('href') + 'history/?limit=' + $(this).val(); }); - $('.show-option select[name="layout"]').on('change', function(){ + $('.show-option select[name="layout"]').on('change', function() { api.patch('config', { layout: { history: $(this).val() @@ -43,8 +43,8 @@ MEDUSA.history.index = function() { }).then(function(response) { log.info(response); window.location.reload(); - }).catch(function (error) { - log.info(error); + }).catch(function(err) { + log.info(err); }); }); }; diff --git a/static/js/home/display-show.js b/static/js/home/display-show.js index 4ce0e9d2b9..4dc7e8370f 100644 --- a/static/js/home/display-show.js +++ b/static/js/home/display-show.js @@ -1,10 +1,10 @@ MEDUSA.home.displayShow = function() { // eslint-disable-line max-lines $('.imdbPlot').on('click', function() { $(this).prev('span').toggle(); - if ($(this).html() === "..show less") { - $(this).html("..show more"); + if ($(this).html() === '..show less') { + $(this).html('..show more'); } else { - $(this).html("..show less"); + $(this).html('..show less'); } moveSummaryBackground(); movecheckboxControlsBackground(); @@ -12,17 +12,17 @@ MEDUSA.home.displayShow = function() { // eslint-disable-line max-lines // adjust the summary background position and size on page load and resize function moveSummaryBackground() { - var height = $("#summary").height() + 10; - var top = $("#summary").offset().top + 5; - $("#summaryBackground").height(height); - $("#summaryBackground").offset({ top: top, left: 0}); + var height = $('#summary').height() + 10; + var top = $('#summary').offset().top + 5; + $('#summaryBackground').height(height); + $('#summaryBackground').offset({top: 
top, left: 0}); } function movecheckboxControlsBackground() { - var height = $("#checkboxControls").height() + 10; - var top = $("#checkboxControls").offset().top - 3; - $("#checkboxControlsBackground").height(height); - $("#checkboxControlsBackground").offset({ top: top, left: 0}); + var height = $('#checkboxControls').height() + 10; + var top = $('#checkboxControls').offset().top - 3; + $('#checkboxControlsBackground').height(height); + $('#checkboxControlsBackground').offset({top: top, left: 0}); } $(window).resize(function() { @@ -414,7 +414,7 @@ MEDUSA.home.displayShow = function() { // eslint-disable-line max-lines }); // href="home/toggleDisplayShowSpecials/?show=${show.indexerid}" - $('.display-specials a').on('click', function(){ + $('.display-specials a').on('click', function() { api.patch('config', { layout: { show: { @@ -424,8 +424,8 @@ MEDUSA.home.displayShow = function() { // eslint-disable-line max-lines }).then(function(response) { log.info(response.data); window.location.reload(); - }).catch(function(response){ - log.error(response.data); + }).catch(function(err) { + log.error(err.data); }); }); }; diff --git a/static/js/home/index.js b/static/js/home/index.js index 4153bc3054..0e9fd79624 100644 --- a/static/js/home/index.js +++ b/static/js/home/index.js @@ -245,7 +245,7 @@ MEDUSA.home.index = function() { popup.on('mouseleave', function() { $(this).remove(); }); - popup.css({zIndex: '9999'}) + popup.css({zIndex: '9999'}); popup.appendTo('body'); var height = 438; @@ -308,7 +308,7 @@ MEDUSA.home.index = function() { } }); - $('.show-option select').on('change', function(){ + $('.show-option select').on('change', function() { api.patch('config', { layout: { home: $(this).val() @@ -316,12 +316,12 @@ MEDUSA.home.index = function() { }).then(function(response) { log.info(response); window.location.reload(); - }).catch(function (error) { - log.info(error); + }).catch(function(err) { + log.info(err); }); }); - $('#showRootDir').on('change', function(){ + $('#showRootDir').on('change', function() { api.patch('config', { showsRoot: { id: $(this).val() @@ -329,9 +329,8 @@ MEDUSA.home.index = function() { }).then(function(response) { log.info(response); window.location.reload(); - }).catch(function (error) { - log.info(error); + }).catch(function(err) { + log.info(err); }); }); - }; diff --git a/static/js/home/snatch-selection.js b/static/js/home/snatch-selection.js index 4bc687ccef..e85f869f7e 100644 --- a/static/js/home/snatch-selection.js +++ b/static/js/home/snatch-selection.js @@ -1,20 +1,20 @@ MEDUSA.home.snatchSelection = function() { $('.imdbPlot').on('click', function() { $(this).prev('span').toggle(); - if ($(this).html() === "..show less") { - $(this).html("..show more"); + if ($(this).html() === '..show less') { + $(this).html('..show more'); } else { - $(this).html("..show less"); + $(this).html('..show less'); } moveSummaryBackground(); }); // adjust the summary background position and size on page load and resize function moveSummaryBackground() { - var height = $("#summary").height() + 10; - var top = $("#summary").offset().top + 5; - $("#summaryBackground").height(height); - $("#summaryBackground").offset({ top: top, left: 0}); + var height = $('#summary').height() + 10; + var top = $('#summary').offset().top + 5; + $('#summaryBackground').height(height); + $('#summaryBackground').offset({top: top, left: 0}); } $(window).resize(function() { @@ -99,10 +99,10 @@ MEDUSA.home.snatchSelection = function() { var data = 
$('meta[data-last-prov-updates]').data('last-prov-updates'); var manualSearchType = $('meta[data-last-prov-updates]').attr('data-manual-search-type'); - var urlParams = show + '&season=' + season + '&episode=' + episode; + var urlParams = show + '&season=' + season + '&episode=' + episode; - if (manualSearchType == 'season') { - urlParams += '&manual_search_type=' + manualSearchType + if (manualSearchType === 'season') { + urlParams += '&manual_search_type=' + manualSearchType; } if (!$.isNumeric(show) || !$.isNumeric(season) || !$.isNumeric(episode)) { diff --git a/static/js/quality-chooser.js b/static/js/quality-chooser.js index 1f723f85e7..d435284e82 100644 --- a/static/js/quality-chooser.js +++ b/static/js/quality-chooser.js @@ -31,50 +31,50 @@ $(document).ready(function() { function backloggedEpisodes() { var selectedPreffered = []; var selectedAllowed = []; - $('#preferred_qualities :selected').each(function(i, selected){ + $('#preferred_qualities :selected').each(function(i, selected) { selectedPreffered[i] = $(selected).val(); }); - $('#allowed_qualities :selected').each(function(i, selected){ + $('#allowed_qualities :selected').each(function(i, selected) { selectedAllowed[i] = $(selected).val(); }); - var url = 'show/' + $('#showIndexerSlug').attr('value') + + var url = 'show/' + $('#showIndexerSlug').attr('value') + '/backlogged' + '?allowed=' + selectedAllowed + - '&preferred=' + selectedPreffered + '&preferred=' + selectedPreffered; api.get(url).then(function(response) { - var newBacklogged = response.data.new - var existingBacklogged = response.data.existing - var variation = Math.abs(newBacklogged - existingBacklogged) - var html = 'Currently you have ' + existingBacklogged + ' backlogged episodes.
    ' - if (newBacklogged == -1 || existingBacklogged == -1) { - html = 'No qualities selected' + var newBacklogged = response.data.new; + var existingBacklogged = response.data.existing; + var variation = Math.abs(newBacklogged - existingBacklogged); + var html = 'Currently you have ' + existingBacklogged + ' backlogged episodes.
    '; + if (newBacklogged === -1 || existingBacklogged === -1) { + html = 'No qualities selected'; } else if (newBacklogged === existingBacklogged) { - html += 'This change won\'t affect your backlogged episodes' + html += 'This change won\'t affect your backlogged episodes'; } else if (newBacklogged > existingBacklogged) { - html += '
    WARNING: your backlogged episodes will increase by ' + variation + '' - html+= '.
    Total new backlogged: ' + newBacklogged + '' + html += '
    WARNING: your backlogged episodes will increase by ' + variation + ''; + html += '.
    Total new backlogged: ' + newBacklogged + ''; // Only show the archive action div if we have backlog increase $('#archive').show(); } else { - html += 'Your backlogged episodes will decrease by ' + variation + '' - html+= '.
    Total new backlogged: ' + newBacklogged + '' + html += 'Your backlogged episodes will decrease by ' + variation + ''; + html += '.
    Total new backlogged: ' + newBacklogged + ''; } $('#backlogged_episodes').html(html); }); } function archiveEpisodes() { - var url = 'show/' + $('#showIndexerName').attr('value') + $('#showID').attr('value') + - '/archiveEpisodes' + var url = 'show/' + $('#showIndexerName').attr('value') + $('#showID').attr('value') + + '/archiveEpisodes'; api.get(url).then(function(response) { - var archivedStatus = response.data.archived - var html = '' + var archivedStatus = response.data.archived; + var html = ''; if (archivedStatus) { - html = 'Successfuly archived episodes' + html = 'Successfuly archived episodes'; // Recalculate backlogged episodes after we archive it backloggedEpisodes(); } else { - html = 'Not episodes needed to be archived' + html = 'Not episodes needed to be archived'; } $('#archivedStatus').html(html); // Restore button text @@ -90,17 +90,17 @@ $(document).ready(function() { var allowed = $.map($('#allowed_qualities option:selected'), function(option) { return option.text; }); - var both = allowed.concat(preferred.filter(function (item) { + var both = allowed.concat(preferred.filter(function(item) { return allowed.indexOf(item) < 0; })); - var allowed_preferred_explanation = both.join(', '); - var preferred_explanation = preferred.join(', '); - var allowed_explanation = allowed.join(', '); + var allowedPreferredExplanation = both.join(', '); + var preferredExplanation = preferred.join(', '); + var allowedExplanation = allowed.join(', '); - $('#allowed_preferred_explanation').text(allowed_preferred_explanation); - $('#preferred_explanation').text(preferred_explanation); - $('#allowed_explanation').text(allowed_explanation); + $('#allowed_preferred_explanation').text(allowedPreferredExplanation); + $('#preferred_explanation').text(preferredExplanation); + $('#allowed_explanation').text(allowedExplanation); $('#allowed_text').hide(); $('#preferred_text1').hide(); @@ -117,7 +117,7 @@ $(document).ready(function() { } } - $('#archiveEpisodes').on('click', function(){ + $('#archiveEpisodes').on('click', function() { $.get($(this).attr('href')); $(this).val('Archiving...'); archiveEpisodes(); @@ -128,7 +128,7 @@ $(document).ready(function() { setFromPresets($('#qualityPreset :selected').val()); }); - $('#qualityPreset, #preferred_qualities, #allowed_qualities').on('change', function(){ + $('#qualityPreset, #preferred_qualities, #allowed_qualities').on('change', function() { setQualityText(); backloggedEpisodes(); }); diff --git a/static/js/schedule/index.js b/static/js/schedule/index.js index 73b63586be..7b0600270a 100644 --- a/static/js/schedule/index.js +++ b/static/js/schedule/index.js @@ -61,7 +61,7 @@ MEDUSA.schedule.index = function() { $.tablesorter.columnSelector.attachTo($('#showListTable'), '#popover-target'); }); - $('.show-option select[name="layout"]').on('change', function(){ + $('.show-option select[name="layout"]').on('change', function() { api.patch('config', { layout: { schedule: $(this).val() @@ -69,8 +69,8 @@ MEDUSA.schedule.index = function() { }).then(function(response) { log.info(response); window.location.reload(); - }).catch(function (error) { - log.info(error); + }).catch(function(err) { + log.info(err); }); }); }; From 50bfec6bddea7b503f685500be9271332a7da34f Mon Sep 17 00:00:00 2001 From: Thraxis Date: Tue, 21 Feb 2017 21:17:40 -0800 Subject: [PATCH 026/344] Pull up to one one --- static/js/common/init.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/static/js/common/init.js b/static/js/common/init.js index 
d5666a75bd..82943a3863 100644 --- a/static/js/common/init.js +++ b/static/js/common/init.js @@ -224,14 +224,12 @@ function rgb2hex(rgb) { var revertBackgroundColor; // used to revert back to original background-color after highlight var allCells = $('.triggerhighlight'); -allCells -.on('mouseover', function() { +allCells.on('mouseover', function() { var el = $(this); var pos = el.index(); var revertBackgroundColor = rgb2hex($(this).parent().css('background-color')); // fetch the original background-color to revert back to var highlightBackgroundColor = colorLuminance(revertBackgroundColor, -0.15); // change highlight color based on original color el.parent().find('.triggerhighlight').css('background-color', highlightBackgroundColor); // setting highlight background-color -}) -.on('mouseout', function() { +}).on('mouseout', function() { $(this).parent().find('.triggerhighlight').css('background-color', revertBackgroundColor); // reverting back to original background-color }); From 7d6fc27f9daee1c491e33974acebfd7fd8c3a776 Mon Sep 17 00:00:00 2001 From: Thraxis Date: Tue, 21 Feb 2017 22:47:20 -0800 Subject: [PATCH 027/344] small change to try and force travis --- static/js/common/init.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/js/common/init.js b/static/js/common/init.js index 82943a3863..467d47dfce 100644 --- a/static/js/common/init.js +++ b/static/js/common/init.js @@ -15,7 +15,7 @@ MEDUSA.common.init = function() { if ($('#sub-menu-container').length === 0) { offset = '50px'; } - if ($(window).width() < 1281) { + if ($(window).width() < 1280) { offset = '50px'; } return offset; From 7c356ce70a00ad975dd9d5518d6da22c367bc2f4 Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 22 Feb 2017 05:56:14 -0300 Subject: [PATCH 028/344] Fix editing root dir in mass update, redirects to default home page (#2266) --- static/js/mass-edit.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/static/js/mass-edit.js b/static/js/mass-edit.js index 0fc0cc5459..64b50360b2 100644 --- a/static/js/mass-edit.js +++ b/static/js/mass-edit.js @@ -14,7 +14,8 @@ $(document).ready(function() { $('#display_new_root_dir_' + curIndex).html('' + $(this).val() + ''); }); - $('.edit_root_dir').on('click', function() { + $('.edit_root_dir').on('click', function(event) { + event.preventDefault(); var curIndex = findDirIndex($(this).attr('id')); var initialDir = $('#new_root_dir_' + curIndex).val(); $(this).nFileBrowser(editRootDir, { From 551e182fa40b103e54d981f61c4c796ef32b969b Mon Sep 17 00:00:00 2001 From: Thraxis Date: Wed, 22 Feb 2017 02:58:08 -0800 Subject: [PATCH 029/344] will fix broken highlight code and move it into the init function (#2270) * will fix broken highlight code and move it into the init function * Commit to kick Travis --- static/js/common/init.js | 80 ++++++++++++++++++++-------------------- 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/static/js/common/init.js b/static/js/common/init.js index 467d47dfce..2a00c04de6 100644 --- a/static/js/common/init.js +++ b/static/js/common/init.js @@ -20,11 +20,50 @@ MEDUSA.common.init = function() { } return offset; } - + $(window).resize(function() { $('.backstretch').css('top', backstretchOffset()); }); + // function to change luminance of #000000 color - used in triggerhighlighting + function colorLuminance(hex, lum) { + hex = String(hex).replace(/[^0-9a-f]/gi, ''); + if (hex.length < 6) { + hex = hex[0] + hex[0] + hex[1] + hex[1] + hex[2] + hex[2]; + } + lum = lum || 0; + var rgb = '#'; + var c; + 
var i; + for (i = 0; i < 3; i++) { + c = parseInt(hex.substr(i * 2, 2), 16); + c = Math.round(Math.min(Math.max(0, c + (c * lum)), 255)).toString(16); + rgb += ('00' + c).substr(c.length); + } + return rgb; + } + + // function to convert rgb(0,0,0) into #000000 + function rgb2hex(rgb) { + rgb = rgb.match(/^rgb\((\d+),\s*(\d+),\s*(\d+)\)$/); + function hex(x) { + return ('0' + parseInt(x).toString(16)).slice(-2); + } + return '#' + hex(rgb[1]) + hex(rgb[2]) + hex(rgb[3]); + } + + var revertBackgroundColor; // used to revert back to original background-color after highlight + var allCells = $('.triggerhighlight'); + allCells.on('mouseover', function() { + var el = $(this); + var pos = el.index(); + revertBackgroundColor = rgb2hex($(this).parent().css('background-color')); // fetch the original background-color to revert back to + var highlightBackgroundColor = colorLuminance(revertBackgroundColor, -0.15); // change highlight color based on original color + el.parent().find('.triggerhighlight').css('background-color', highlightBackgroundColor); // setting highlight background-color + }).on('mouseout', function() { + $(this).parent().find('.triggerhighlight').css('background-color', revertBackgroundColor); // reverting back to original background-color + }); + $.confirm.options = { confirmButton: 'Yes', cancelButton: 'Cancel', @@ -194,42 +233,3 @@ MEDUSA.common.init = function() { }); }); }; - -// function to change luminance of #000000 color - used in triggerhighlighting -function colorLuminance(hex, lum) { - hex = String(hex).replace(/[^0-9a-f]/gi, ''); - if (hex.length < 6) { - hex = hex[0] + hex[0] + hex[1] + hex[1] + hex[2] + hex[2]; - } - lum = lum || 0; - var rgb = '#'; - var c; - var i; - for (i = 0; i < 3; i++) { - c = parseInt(hex.substr(i * 2, 2), 16); - c = Math.round(Math.min(Math.max(0, c + (c * lum)), 255)).toString(16); - rgb += ('00' + c).substr(c.length); - } - return rgb; -} - - // function to convert rgb(0,0,0) into #000000 -function rgb2hex(rgb) { - rgb = rgb.match(/^rgb\((\d+),\s*(\d+),\s*(\d+)\)$/); - function hex(x) { - return ('0' + parseInt(x).toString(16)).slice(-2); - } - return '#' + hex(rgb[1]) + hex(rgb[2]) + hex(rgb[3]); -} - -var revertBackgroundColor; // used to revert back to original background-color after highlight -var allCells = $('.triggerhighlight'); -allCells.on('mouseover', function() { - var el = $(this); - var pos = el.index(); - var revertBackgroundColor = rgb2hex($(this).parent().css('background-color')); // fetch the original background-color to revert back to - var highlightBackgroundColor = colorLuminance(revertBackgroundColor, -0.15); // change highlight color based on original color - el.parent().find('.triggerhighlight').css('background-color', highlightBackgroundColor); // setting highlight background-color -}).on('mouseout', function() { - $(this).parent().find('.triggerhighlight').css('background-color', revertBackgroundColor); // reverting back to original background-color -}); From 96d6c2f670bfc59216627b84b005296c125fd59c Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 22 Feb 2017 22:10:31 +0100 Subject: [PATCH 030/344] Use smart rar_basename() instead of list comprehensions (#2145) * Use smart rar_basename() instead of list comprehensions * Fix import * Use splitext instead of rpartition, add docstring, rename some variables * Update rarfile to version 3.0 * Use pathlib2 (2.2.1) instead of fnmatch * Revert rarfile to version 2.8 * Update scandir to version 1.5 * Update rarfile version 2.8 * Fix rename of files with rar name, remove 
useless check * Add rarfile error handling * Update rarfile to version 3.0 --- lib/pathlib2.py | 1654 +++++++++++++++++++++++++ lib/rarfile.py | 2465 ++++++++++++++++++++++++++------------ lib/scandir.py | 671 +++++++++++ medusa/post_processor.py | 193 ++- 4 files changed, 4125 insertions(+), 858 deletions(-) create mode 100644 lib/pathlib2.py create mode 100644 lib/scandir.py diff --git a/lib/pathlib2.py b/lib/pathlib2.py new file mode 100644 index 0000000000..6af4a45e61 --- /dev/null +++ b/lib/pathlib2.py @@ -0,0 +1,1654 @@ +# Copyright (c) 2014-2017 Matthias C. M. Troffaes +# Copyright (c) 2012-2014 Antoine Pitrou and contributors +# Distributed under the terms of the MIT License. + +import ctypes +import fnmatch +import functools +import io +import ntpath +import os +import posixpath +import re +import six +import sys +from collections import Sequence +from errno import EINVAL, ENOENT, ENOTDIR, EEXIST, EPERM, EACCES +from operator import attrgetter +from stat import ( + S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO) +try: + from urllib import quote as urlquote_from_bytes +except ImportError: + from urllib.parse import quote_from_bytes as urlquote_from_bytes + + +try: + intern = intern +except NameError: + intern = sys.intern + +supports_symlinks = True +try: + import nt +except ImportError: + nt = None +else: + if sys.getwindowsversion()[:2] >= (6, 0) and sys.version_info >= (3, 2): + from nt import _getfinalpathname + else: + supports_symlinks = False + _getfinalpathname = None + +try: + from os import scandir as os_scandir +except ImportError: + from scandir import scandir as os_scandir + +__all__ = [ + "PurePath", "PurePosixPath", "PureWindowsPath", + "Path", "PosixPath", "WindowsPath", + ] + +# +# Internals +# + + +def _py2_fsencode(parts): + # py2 => minimal unicode support + assert six.PY2 + return [part.encode('ascii') if isinstance(part, six.text_type) + else part for part in parts] + + +def _try_except_fileexistserror(try_func, except_func): + if sys.version_info >= (3, 3): + try: + try_func() + except FileExistsError as exc: + except_func(exc) + else: + try: + try_func() + except EnvironmentError as exc: + if exc.errno != EEXIST: + raise + else: + except_func(exc) + + +def _try_except_permissionerror_iter(try_iter, except_iter): + if sys.version_info >= (3, 3): + try: + for x in try_iter(): + yield x + except PermissionError as exc: + for x in except_iter(exc): + yield x + else: + try: + for x in try_iter(): + yield x + except EnvironmentError as exc: + if exc.errno not in (EPERM, EACCES): + raise + else: + for x in except_iter(exc): + yield x + + +def _win32_get_unique_path_id(path): + # get file information, needed for samefile on older Python versions + # see http://timgolden.me.uk/python/win32_how_do_i/ + # see_if_two_files_are_the_same_file.html + from ctypes import POINTER, Structure, WinError + from ctypes.wintypes import DWORD, HANDLE, BOOL + + class FILETIME(Structure): + _fields_ = [("datetime_lo", DWORD), + ("datetime_hi", DWORD), + ] + + class BY_HANDLE_FILE_INFORMATION(Structure): + _fields_ = [("attributes", DWORD), + ("created_at", FILETIME), + ("accessed_at", FILETIME), + ("written_at", FILETIME), + ("volume", DWORD), + ("file_hi", DWORD), + ("file_lo", DWORD), + ("n_links", DWORD), + ("index_hi", DWORD), + ("index_lo", DWORD), + ] + + CreateFile = ctypes.windll.kernel32.CreateFileW + CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p, + DWORD, DWORD, HANDLE] + CreateFile.restype = HANDLE + GetFileInformationByHandle = ( + 
ctypes.windll.kernel32.GetFileInformationByHandle) + GetFileInformationByHandle.argtypes = [ + HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)] + GetFileInformationByHandle.restype = BOOL + CloseHandle = ctypes.windll.kernel32.CloseHandle + CloseHandle.argtypes = [HANDLE] + CloseHandle.restype = BOOL + GENERIC_READ = 0x80000000 + FILE_SHARE_READ = 0x00000001 + FILE_FLAG_BACKUP_SEMANTICS = 0x02000000 + OPEN_EXISTING = 3 + if os.path.isdir(path): + flags = FILE_FLAG_BACKUP_SEMANTICS + else: + flags = 0 + hfile = CreateFile(path, GENERIC_READ, FILE_SHARE_READ, + None, OPEN_EXISTING, flags, None) + if hfile == 0xffffffff: + if sys.version_info >= (3, 3): + raise FileNotFoundError(path) + else: + exc = OSError("file not found: path") + exc.errno = ENOENT + raise exc + info = BY_HANDLE_FILE_INFORMATION() + success = GetFileInformationByHandle(hfile, info) + CloseHandle(hfile) + if success == 0: + raise WinError() + return info.volume, info.index_hi, info.index_lo + + +def _is_wildcard_pattern(pat): + # Whether this pattern needs actual matching using fnmatch, or can + # be looked up directly as a file. + return "*" in pat or "?" in pat or "[" in pat + + +class _Flavour(object): + + """A flavour implements a particular (platform-specific) set of path + semantics.""" + + def __init__(self): + self.join = self.sep.join + + def parse_parts(self, parts): + if six.PY2: + parts = _py2_fsencode(parts) + parsed = [] + sep = self.sep + altsep = self.altsep + drv = root = '' + it = reversed(parts) + for part in it: + if not part: + continue + if altsep: + part = part.replace(altsep, sep) + drv, root, rel = self.splitroot(part) + if sep in rel: + for x in reversed(rel.split(sep)): + if x and x != '.': + parsed.append(intern(x)) + else: + if rel and rel != '.': + parsed.append(intern(rel)) + if drv or root: + if not drv: + # If no drive is present, try to find one in the previous + # parts. This makes the result of parsing e.g. + # ("C:", "/", "a") reasonably intuitive. + for part in it: + if not part: + continue + if altsep: + part = part.replace(altsep, sep) + drv = self.splitroot(part)[0] + if drv: + break + break + if drv or root: + parsed.append(drv + root) + parsed.reverse() + return drv, root, parsed + + def join_parsed_parts(self, drv, root, parts, drv2, root2, parts2): + """ + Join the two paths represented by the respective + (drive, root, parts) tuples. Return a new (drive, root, parts) tuple. 
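For illustration only, a minimal sketch of the drive/root handling described in the parse_parts() comment above, observed through the public PureWindowsPath class defined later in this module (the assert values reflect standard pathlib semantics and assume lib/ is on sys.path):

    from pathlib2 import PureWindowsPath

    # The drive is picked up from an earlier argument, so ("C:", "/", "a")
    # parses to a drive-anchored path, as the comment above describes.
    p = PureWindowsPath('C:', '/', 'a')
    assert str(p) == 'C:\\a'
    assert p.drive == 'C:' and p.root == '\\'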
+ """ + if root2: + if not drv2 and drv: + return drv, root2, [drv + root2] + parts2[1:] + elif drv2: + if drv2 == drv or self.casefold(drv2) == self.casefold(drv): + # Same drive => second path is relative to the first + return drv, root, parts + parts2[1:] + else: + # Second path is non-anchored (common case) + return drv, root, parts + parts2 + return drv2, root2, parts2 + + +class _WindowsFlavour(_Flavour): + # Reference for Windows paths can be found at + # http://msdn.microsoft.com/en-us/library/aa365247%28v=vs.85%29.aspx + + sep = '\\' + altsep = '/' + has_drv = True + pathmod = ntpath + + is_supported = (os.name == 'nt') + + drive_letters = ( + set(chr(x) for x in range(ord('a'), ord('z') + 1)) | + set(chr(x) for x in range(ord('A'), ord('Z') + 1)) + ) + ext_namespace_prefix = '\\\\?\\' + + reserved_names = ( + set(['CON', 'PRN', 'AUX', 'NUL']) | + set(['COM%d' % i for i in range(1, 10)]) | + set(['LPT%d' % i for i in range(1, 10)]) + ) + + # Interesting findings about extended paths: + # - '\\?\c:\a', '//?/c:\a' and '//?/c:/a' are all supported + # but '\\?\c:/a' is not + # - extended paths are always absolute; "relative" extended paths will + # fail. + + def splitroot(self, part, sep=sep): + first = part[0:1] + second = part[1:2] + if (second == sep and first == sep): + # XXX extended paths should also disable the collapsing of "." + # components (according to MSDN docs). + prefix, part = self._split_extended_path(part) + first = part[0:1] + second = part[1:2] + else: + prefix = '' + third = part[2:3] + if (second == sep and first == sep and third != sep): + # is a UNC path: + # vvvvvvvvvvvvvvvvvvvvv root + # \\machine\mountpoint\directory\etc\... + # directory ^^^^^^^^^^^^^^ + index = part.find(sep, 2) + if index != -1: + index2 = part.find(sep, index + 1) + # a UNC path can't have two slashes in a row + # (after the initial two) + if index2 != index + 1: + if index2 == -1: + index2 = len(part) + if prefix: + return prefix + part[1:index2], sep, part[index2 + 1:] + else: + return part[:index2], sep, part[index2 + 1:] + drv = root = '' + if second == ':' and first in self.drive_letters: + drv = part[:2] + part = part[2:] + first = third + if first == sep: + root = first + part = part.lstrip(sep) + return prefix + drv, root, part + + def casefold(self, s): + return s.lower() + + def casefold_parts(self, parts): + return [p.lower() for p in parts] + + def resolve(self, path, strict=False): + s = str(path) + if not s: + return os.getcwd() + previous_s = None + if _getfinalpathname is not None: + if strict: + return self._ext_to_normal(_getfinalpathname(s)) + else: + while True: + try: + s = self._ext_to_normal(_getfinalpathname(s)) + except FileNotFoundError: + previous_s = s + s = os.path.abspath(os.path.join(s, os.pardir)) + else: + if previous_s is None: + return s + else: + return ( + s + os.path.sep + + os.path.basename(previous_s)) + # Means fallback on absolute + return None + + def _split_extended_path(self, s, ext_prefix=ext_namespace_prefix): + prefix = '' + if s.startswith(ext_prefix): + prefix = s[:4] + s = s[4:] + if s.startswith('UNC\\'): + prefix += s[:3] + s = '\\' + s[3:] + return prefix, s + + def _ext_to_normal(self, s): + # Turn back an extended path into a normal DOS-like path + return self._split_extended_path(s)[1] + + def is_reserved(self, parts): + # NOTE: the rules for reserved names seem somewhat complicated + # (e.g. r"..\NUL" is reserved but not r"foo\NUL"). 
+ # We err on the side of caution and return True for paths which are + # not considered reserved by Windows. + if not parts: + return False + if parts[0].startswith('\\\\'): + # UNC paths are never reserved + return False + return parts[-1].partition('.')[0].upper() in self.reserved_names + + def make_uri(self, path): + # Under Windows, file URIs use the UTF-8 encoding. + drive = path.drive + if len(drive) == 2 and drive[1] == ':': + # It's a path on a local drive => 'file:///c:/a/b' + rest = path.as_posix()[2:].lstrip('/') + return 'file:///%s/%s' % ( + drive, urlquote_from_bytes(rest.encode('utf-8'))) + else: + # It's a path on a network drive => 'file://host/share/a/b' + return 'file:' + urlquote_from_bytes( + path.as_posix().encode('utf-8')) + + def gethomedir(self, username): + if 'HOME' in os.environ: + userhome = os.environ['HOME'] + elif 'USERPROFILE' in os.environ: + userhome = os.environ['USERPROFILE'] + elif 'HOMEPATH' in os.environ: + try: + drv = os.environ['HOMEDRIVE'] + except KeyError: + drv = '' + userhome = drv + os.environ['HOMEPATH'] + else: + raise RuntimeError("Can't determine home directory") + + if username: + # Try to guess user home directory. By default all users + # directories are located in the same place and are named by + # corresponding usernames. If current user home directory points + # to nonstandard place, this guess is likely wrong. + if os.environ['USERNAME'] != username: + drv, root, parts = self.parse_parts((userhome,)) + if parts[-1] != os.environ['USERNAME']: + raise RuntimeError("Can't determine home directory " + "for %r" % username) + parts[-1] = username + if drv or root: + userhome = drv + root + self.join(parts[1:]) + else: + userhome = self.join(parts) + return userhome + + +class _PosixFlavour(_Flavour): + sep = '/' + altsep = '' + has_drv = False + pathmod = posixpath + + is_supported = (os.name != 'nt') + + def splitroot(self, part, sep=sep): + if part and part[0] == sep: + stripped_part = part.lstrip(sep) + # According to POSIX path resolution: + # http://pubs.opengroup.org/onlinepubs/009695399/basedefs/ + # xbd_chap04.html#tag_04_11 + # "A pathname that begins with two successive slashes may be + # interpreted in an implementation-defined manner, although more + # than two leading slashes shall be treated as a single slash". + if len(part) - len(stripped_part) == 2: + return '', sep * 2, stripped_part + else: + return '', sep, stripped_part + else: + return '', '', part + + def casefold(self, s): + return s + + def casefold_parts(self, parts): + return parts + + def resolve(self, path, strict=False): + sep = self.sep + accessor = path._accessor + seen = {} + + def _resolve(path, rest): + if rest.startswith(sep): + path = '' + + for name in rest.split(sep): + if not name or name == '.': + # current dir + continue + if name == '..': + # parent dir + path, _, _ = path.rpartition(sep) + continue + newpath = path + sep + name + if newpath in seen: + # Already seen this path + path = seen[newpath] + if path is not None: + # use cached value + continue + # The symlink is not resolved, so we must have a symlink + # loop. 
+ raise RuntimeError("Symlink loop from %r" % newpath) + # Resolve the symbolic link + try: + target = accessor.readlink(newpath) + except OSError as e: + if e.errno != EINVAL: + if strict: + raise + else: + return newpath + # Not a symlink + path = newpath + else: + seen[newpath] = None # not resolved symlink + path = _resolve(path, target) + seen[newpath] = path # resolved symlink + + return path + # NOTE: according to POSIX, getcwd() cannot contain path components + # which are symlinks. + base = '' if path.is_absolute() else os.getcwd() + return _resolve(base, str(path)) or sep + + def is_reserved(self, parts): + return False + + def make_uri(self, path): + # We represent the path using the local filesystem encoding, + # for portability to other applications. + bpath = bytes(path) + return 'file://' + urlquote_from_bytes(bpath) + + def gethomedir(self, username): + if not username: + try: + return os.environ['HOME'] + except KeyError: + import pwd + return pwd.getpwuid(os.getuid()).pw_dir + else: + import pwd + try: + return pwd.getpwnam(username).pw_dir + except KeyError: + raise RuntimeError("Can't determine home directory " + "for %r" % username) + + +_windows_flavour = _WindowsFlavour() +_posix_flavour = _PosixFlavour() + + +class _Accessor: + + """An accessor implements a particular (system-specific or not) way of + accessing paths on the filesystem.""" + + +class _NormalAccessor(_Accessor): + + def _wrap_strfunc(strfunc): + @functools.wraps(strfunc) + def wrapped(pathobj, *args): + return strfunc(str(pathobj), *args) + return staticmethod(wrapped) + + def _wrap_binary_strfunc(strfunc): + @functools.wraps(strfunc) + def wrapped(pathobjA, pathobjB, *args): + return strfunc(str(pathobjA), str(pathobjB), *args) + return staticmethod(wrapped) + + stat = _wrap_strfunc(os.stat) + + lstat = _wrap_strfunc(os.lstat) + + open = _wrap_strfunc(os.open) + + listdir = _wrap_strfunc(os.listdir) + + scandir = _wrap_strfunc(os_scandir) + + chmod = _wrap_strfunc(os.chmod) + + if hasattr(os, "lchmod"): + lchmod = _wrap_strfunc(os.lchmod) + else: + def lchmod(self, pathobj, mode): + raise NotImplementedError("lchmod() not available on this system") + + mkdir = _wrap_strfunc(os.mkdir) + + unlink = _wrap_strfunc(os.unlink) + + rmdir = _wrap_strfunc(os.rmdir) + + rename = _wrap_binary_strfunc(os.rename) + + if sys.version_info >= (3, 3): + replace = _wrap_binary_strfunc(os.replace) + + if nt: + if supports_symlinks: + symlink = _wrap_binary_strfunc(os.symlink) + else: + def symlink(a, b, target_is_directory): + raise NotImplementedError( + "symlink() not available on this system") + else: + # Under POSIX, os.symlink() takes two args + @staticmethod + def symlink(a, b, target_is_directory): + return os.symlink(str(a), str(b)) + + utime = _wrap_strfunc(os.utime) + + # Helper for resolve() + def readlink(self, path): + return os.readlink(path) + + +_normal_accessor = _NormalAccessor() + + +# +# Globbing helpers +# + +def _make_selector(pattern_parts): + pat = pattern_parts[0] + child_parts = pattern_parts[1:] + if pat == '**': + cls = _RecursiveWildcardSelector + elif '**' in pat: + raise ValueError( + "Invalid pattern: '**' can only be an entire path component") + elif _is_wildcard_pattern(pat): + cls = _WildcardSelector + else: + cls = _PreciseSelector + return cls(pat, child_parts) + + +if hasattr(functools, "lru_cache"): + _make_selector = functools.lru_cache()(_make_selector) + + +class _Selector: + + """A selector matches a specific glob pattern part against the children + of a given path.""" + + def 
__init__(self, child_parts): + self.child_parts = child_parts + if child_parts: + self.successor = _make_selector(child_parts) + self.dironly = True + else: + self.successor = _TerminatingSelector() + self.dironly = False + + def select_from(self, parent_path): + """Iterate over all child paths of `parent_path` matched by this + selector. This can contain parent_path itself.""" + path_cls = type(parent_path) + is_dir = path_cls.is_dir + exists = path_cls.exists + scandir = parent_path._accessor.scandir + if not is_dir(parent_path): + return iter([]) + return self._select_from(parent_path, is_dir, exists, scandir) + + +class _TerminatingSelector: + + def _select_from(self, parent_path, is_dir, exists, scandir): + yield parent_path + + +class _PreciseSelector(_Selector): + + def __init__(self, name, child_parts): + self.name = name + _Selector.__init__(self, child_parts) + + def _select_from(self, parent_path, is_dir, exists, scandir): + def try_iter(): + path = parent_path._make_child_relpath(self.name) + if (is_dir if self.dironly else exists)(path): + for p in self.successor._select_from( + path, is_dir, exists, scandir): + yield p + + def except_iter(): + return + yield + + for x in _try_except_permissionerror_iter(try_iter, except_iter): + yield x + + +class _WildcardSelector(_Selector): + + def __init__(self, pat, child_parts): + self.pat = re.compile(fnmatch.translate(pat)) + _Selector.__init__(self, child_parts) + + def _select_from(self, parent_path, is_dir, exists, scandir): + def try_iter(): + cf = parent_path._flavour.casefold + entries = list(scandir(parent_path)) + for entry in entries: + if not self.dironly or entry.is_dir(): + name = entry.name + casefolded = cf(name) + if self.pat.match(casefolded): + path = parent_path._make_child_relpath(name) + for p in self.successor._select_from( + path, is_dir, exists, scandir): + yield p + + def except_iter(): + return + yield + + for x in _try_except_permissionerror_iter(try_iter, except_iter): + yield x + + +class _RecursiveWildcardSelector(_Selector): + + def __init__(self, pat, child_parts): + _Selector.__init__(self, child_parts) + + def _iterate_directories(self, parent_path, is_dir, scandir): + yield parent_path + + def try_iter(): + entries = list(scandir(parent_path)) + for entry in entries: + if entry.is_dir() and not entry.is_symlink(): + path = parent_path._make_child_relpath(entry.name) + for p in self._iterate_directories(path, is_dir, scandir): + yield p + + def except_iter(): + return + yield + + for x in _try_except_permissionerror_iter(try_iter, except_iter): + yield x + + def _select_from(self, parent_path, is_dir, exists, scandir): + def try_iter(): + yielded = set() + try: + successor_select = self.successor._select_from + for starting_point in self._iterate_directories( + parent_path, is_dir, scandir): + for p in successor_select( + starting_point, is_dir, exists, scandir): + if p not in yielded: + yield p + yielded.add(p) + finally: + yielded.clear() + + def except_iter(): + return + yield + + for x in _try_except_permissionerror_iter(try_iter, except_iter): + yield x + + +# +# Public API +# + +class _PathParents(Sequence): + + """This object provides sequence-like access to the logical ancestors + of a path. 
Don't try to construct it yourself.""" + __slots__ = ('_pathcls', '_drv', '_root', '_parts') + + def __init__(self, path): + # We don't store the instance to avoid reference cycles + self._pathcls = type(path) + self._drv = path._drv + self._root = path._root + self._parts = path._parts + + def __len__(self): + if self._drv or self._root: + return len(self._parts) - 1 + else: + return len(self._parts) + + def __getitem__(self, idx): + if idx < 0 or idx >= len(self): + raise IndexError(idx) + return self._pathcls._from_parsed_parts(self._drv, self._root, + self._parts[:-idx - 1]) + + def __repr__(self): + return "<{0}.parents>".format(self._pathcls.__name__) + + +class PurePath(object): + + """PurePath represents a filesystem path and offers operations which + don't imply any actual filesystem I/O. Depending on your system, + instantiating a PurePath will return either a PurePosixPath or a + PureWindowsPath object. You can also instantiate either of these classes + directly, regardless of your system. + """ + __slots__ = ( + '_drv', '_root', '_parts', + '_str', '_hash', '_pparts', '_cached_cparts', + ) + + def __new__(cls, *args): + """Construct a PurePath from one or several strings and or existing + PurePath objects. The strings and path objects are combined so as + to yield a canonicalized path, which is incorporated into the + new PurePath object. + """ + if cls is PurePath: + cls = PureWindowsPath if os.name == 'nt' else PurePosixPath + return cls._from_parts(args) + + def __reduce__(self): + # Using the parts tuple helps share interned path parts + # when pickling related paths. + return (self.__class__, tuple(self._parts)) + + @classmethod + def _parse_args(cls, args): + # This is useful when you don't want to create an instance, just + # canonicalize some constructor arguments. + parts = [] + for a in args: + if isinstance(a, PurePath): + parts += a._parts + else: + if sys.version_info >= (3, 6): + a = os.fspath(a) + else: + # duck typing for older Python versions + if hasattr(a, "__fspath__"): + a = a.__fspath__() + if isinstance(a, str): + # Force-cast str subclasses to str (issue #21127) + parts.append(str(a)) + # also handle unicode for PY2 (six.text_type = unicode) + elif six.PY2 and isinstance(a, six.text_type): + # cast to str using filesystem encoding + parts.append(a.encode(sys.getfilesystemencoding())) + else: + raise TypeError( + "argument should be a str object or an os.PathLike " + "object returning str, not %r" + % type(a)) + return cls._flavour.parse_parts(parts) + + @classmethod + def _from_parts(cls, args, init=True): + # We need to call _parse_args on the instance, so as to get the + # right flavour. 
+ self = object.__new__(cls) + drv, root, parts = self._parse_args(args) + self._drv = drv + self._root = root + self._parts = parts + if init: + self._init() + return self + + @classmethod + def _from_parsed_parts(cls, drv, root, parts, init=True): + self = object.__new__(cls) + self._drv = drv + self._root = root + self._parts = parts + if init: + self._init() + return self + + @classmethod + def _format_parsed_parts(cls, drv, root, parts): + if drv or root: + return drv + root + cls._flavour.join(parts[1:]) + else: + return cls._flavour.join(parts) + + def _init(self): + # Overridden in concrete Path + pass + + def _make_child(self, args): + drv, root, parts = self._parse_args(args) + drv, root, parts = self._flavour.join_parsed_parts( + self._drv, self._root, self._parts, drv, root, parts) + return self._from_parsed_parts(drv, root, parts) + + def __str__(self): + """Return the string representation of the path, suitable for + passing to system calls.""" + try: + return self._str + except AttributeError: + self._str = self._format_parsed_parts(self._drv, self._root, + self._parts) or '.' + return self._str + + def __fspath__(self): + return str(self) + + def as_posix(self): + """Return the string representation of the path with forward (/) + slashes.""" + f = self._flavour + return str(self).replace(f.sep, '/') + + def __bytes__(self): + """Return the bytes representation of the path. This is only + recommended to use under Unix.""" + if sys.version_info < (3, 2): + raise NotImplementedError("needs Python 3.2 or later") + return os.fsencode(str(self)) + + def __repr__(self): + return "{0}({1!r})".format(self.__class__.__name__, self.as_posix()) + + def as_uri(self): + """Return the path as a 'file' URI.""" + if not self.is_absolute(): + raise ValueError("relative path can't be expressed as a file URI") + return self._flavour.make_uri(self) + + @property + def _cparts(self): + # Cached casefolded parts, for hashing and comparison + try: + return self._cached_cparts + except AttributeError: + self._cached_cparts = self._flavour.casefold_parts(self._parts) + return self._cached_cparts + + def __eq__(self, other): + if not isinstance(other, PurePath): + return NotImplemented + return ( + self._cparts == other._cparts + and self._flavour is other._flavour) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(tuple(self._cparts)) + return self._hash + + def __lt__(self, other): + if (not isinstance(other, PurePath) + or self._flavour is not other._flavour): + return NotImplemented + return self._cparts < other._cparts + + def __le__(self, other): + if (not isinstance(other, PurePath) + or self._flavour is not other._flavour): + return NotImplemented + return self._cparts <= other._cparts + + def __gt__(self, other): + if (not isinstance(other, PurePath) + or self._flavour is not other._flavour): + return NotImplemented + return self._cparts > other._cparts + + def __ge__(self, other): + if (not isinstance(other, PurePath) + or self._flavour is not other._flavour): + return NotImplemented + return self._cparts >= other._cparts + + drive = property(attrgetter('_drv'), + doc="""The drive prefix (letter or UNC path), if any.""") + + root = property(attrgetter('_root'), + doc="""The root of the path, if any.""") + + @property + def anchor(self): + """The concatenation of the drive and root, or ''.""" + anchor = self._drv + self._root + return anchor + + @property + def name(self): + """The final 
path component, if any.""" + parts = self._parts + if len(parts) == (1 if (self._drv or self._root) else 0): + return '' + return parts[-1] + + @property + def suffix(self): + """The final component's last suffix, if any.""" + name = self.name + i = name.rfind('.') + if 0 < i < len(name) - 1: + return name[i:] + else: + return '' + + @property + def suffixes(self): + """A list of the final component's suffixes, if any.""" + name = self.name + if name.endswith('.'): + return [] + name = name.lstrip('.') + return ['.' + suffix for suffix in name.split('.')[1:]] + + @property + def stem(self): + """The final path component, minus its last suffix.""" + name = self.name + i = name.rfind('.') + if 0 < i < len(name) - 1: + return name[:i] + else: + return name + + def with_name(self, name): + """Return a new path with the file name changed.""" + if not self.name: + raise ValueError("%r has an empty name" % (self,)) + drv, root, parts = self._flavour.parse_parts((name,)) + if (not name or name[-1] in [self._flavour.sep, self._flavour.altsep] + or drv or root or len(parts) != 1): + raise ValueError("Invalid name %r" % (name)) + return self._from_parsed_parts(self._drv, self._root, + self._parts[:-1] + [name]) + + def with_suffix(self, suffix): + """Return a new path with the file suffix changed (or added, if + none). + """ + # XXX if suffix is None, should the current suffix be removed? + f = self._flavour + if f.sep in suffix or f.altsep and f.altsep in suffix: + raise ValueError("Invalid suffix %r" % (suffix)) + if suffix and not suffix.startswith('.') or suffix == '.': + raise ValueError("Invalid suffix %r" % (suffix)) + name = self.name + if not name: + raise ValueError("%r has an empty name" % (self,)) + old_suffix = self.suffix + if not old_suffix: + name = name + suffix + else: + name = name[:-len(old_suffix)] + suffix + return self._from_parsed_parts(self._drv, self._root, + self._parts[:-1] + [name]) + + def relative_to(self, *other): + """Return the relative path to another path identified by the passed + arguments. If the operation is not possible (because this is not + a subpath of the other path), raise ValueError. + """ + # For the purpose of this method, drive and root are considered + # separate parts, i.e.: + # Path('c:/').relative_to('c:') gives Path('/') + # Path('c:/').relative_to('/') raise ValueError + if not other: + raise TypeError("need at least one argument") + parts = self._parts + drv = self._drv + root = self._root + if root: + abs_parts = [drv, root] + parts[1:] + else: + abs_parts = parts + to_drv, to_root, to_parts = self._parse_args(other) + if to_root: + to_abs_parts = [to_drv, to_root] + to_parts[1:] + else: + to_abs_parts = to_parts + n = len(to_abs_parts) + cf = self._flavour.casefold_parts + if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts): + formatted = self._format_parsed_parts(to_drv, to_root, to_parts) + raise ValueError("{0!r} does not start with {1!r}" + .format(str(self), str(formatted))) + return self._from_parsed_parts('', root if n == 1 else '', + abs_parts[n:]) + + @property + def parts(self): + """An object providing sequence-like access to the + components in the filesystem path.""" + # We cache the tuple to avoid building a new one each time .parts + # is accessed. XXX is this necessary? 
+ try: + return self._pparts + except AttributeError: + self._pparts = tuple(self._parts) + return self._pparts + + def joinpath(self, *args): + """Combine this path with one or several arguments, and return a + new path representing either a subpath (if all arguments are relative + paths) or a totally different path (if one of the arguments is + anchored). + """ + return self._make_child(args) + + def __truediv__(self, key): + return self._make_child((key,)) + + def __rtruediv__(self, key): + return self._from_parts([key] + self._parts) + + if six.PY2: + __div__ = __truediv__ + __rdiv__ = __rtruediv__ + + @property + def parent(self): + """The logical parent of the path.""" + drv = self._drv + root = self._root + parts = self._parts + if len(parts) == 1 and (drv or root): + return self + return self._from_parsed_parts(drv, root, parts[:-1]) + + @property + def parents(self): + """A sequence of this path's logical parents.""" + return _PathParents(self) + + def is_absolute(self): + """True if the path is absolute (has both a root and, if applicable, + a drive).""" + if not self._root: + return False + return not self._flavour.has_drv or bool(self._drv) + + def is_reserved(self): + """Return True if the path contains one of the special names reserved + by the system, if any.""" + return self._flavour.is_reserved(self._parts) + + def match(self, path_pattern): + """ + Return True if this path matches the given pattern. + """ + cf = self._flavour.casefold + path_pattern = cf(path_pattern) + drv, root, pat_parts = self._flavour.parse_parts((path_pattern,)) + if not pat_parts: + raise ValueError("empty pattern") + if drv and drv != cf(self._drv): + return False + if root and root != cf(self._root): + return False + parts = self._cparts + if drv or root: + if len(pat_parts) != len(parts): + return False + pat_parts = pat_parts[1:] + elif len(pat_parts) > len(parts): + return False + for part, pat in zip(reversed(parts), reversed(pat_parts)): + if not fnmatch.fnmatchcase(part, pat): + return False + return True + + +# Can't subclass os.PathLike from PurePath and keep the constructor +# optimizations in PurePath._parse_args(). +if sys.version_info >= (3, 6): + os.PathLike.register(PurePath) + + +class PurePosixPath(PurePath): + _flavour = _posix_flavour + __slots__ = () + + +class PureWindowsPath(PurePath): + _flavour = _windows_flavour + __slots__ = () + + +# Filesystem-accessing classes + + +class Path(PurePath): + __slots__ = ( + '_accessor', + '_closed', + ) + + def __new__(cls, *args, **kwargs): + if cls is Path: + cls = WindowsPath if os.name == 'nt' else PosixPath + self = cls._from_parts(args, init=False) + if not self._flavour.is_supported: + raise NotImplementedError("cannot instantiate %r on your system" + % (cls.__name__,)) + self._init() + return self + + def _init(self, + # Private non-constructor arguments + template=None, + ): + self._closed = False + if template is not None: + self._accessor = template._accessor + else: + self._accessor = _normal_accessor + + def _make_child_relpath(self, part): + # This is an optimization used for dir walking. `part` must be + # a single part relative to this path. 
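As a hedged usage sketch of the PurePath API shown above (joining with '/', parent, name/suffix, match); the base directory and file name are hypothetical:

    from pathlib2 import PurePosixPath

    base = PurePosixPath('/opt/medusa')                      # hypothetical base directory
    p = base / 'downloads' / 'show.s01e01.mkv'
    assert str(p) == '/opt/medusa/downloads/show.s01e01.mkv'
    assert p.name == 'show.s01e01.mkv' and p.suffix == '.mkv'
    assert str(p.parent) == '/opt/medusa/downloads'
    assert p.is_absolute() and p.match('*.mkv')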
+ parts = self._parts + [part] + return self._from_parsed_parts(self._drv, self._root, parts) + + def __enter__(self): + if self._closed: + self._raise_closed() + return self + + def __exit__(self, t, v, tb): + self._closed = True + + def _raise_closed(self): + raise ValueError("I/O operation on closed path") + + def _opener(self, name, flags, mode=0o666): + # A stub for the opener argument to built-in open() + return self._accessor.open(self, flags, mode) + + def _raw_open(self, flags, mode=0o777): + """ + Open the file pointed by this path and return a file descriptor, + as os.open() does. + """ + if self._closed: + self._raise_closed() + return self._accessor.open(self, flags, mode) + + # Public API + + @classmethod + def cwd(cls): + """Return a new path pointing to the current working directory + (as returned by os.getcwd()). + """ + return cls(os.getcwd()) + + @classmethod + def home(cls): + """Return a new path pointing to the user's home directory (as + returned by os.path.expanduser('~')). + """ + return cls(cls()._flavour.gethomedir(None)) + + def samefile(self, other_path): + """Return whether other_path is the same or not as this file + (as returned by os.path.samefile()). + """ + if hasattr(os.path, "samestat"): + st = self.stat() + try: + other_st = other_path.stat() + except AttributeError: + other_st = os.stat(other_path) + return os.path.samestat(st, other_st) + else: + filename1 = six.text_type(self) + filename2 = six.text_type(other_path) + st1 = _win32_get_unique_path_id(filename1) + st2 = _win32_get_unique_path_id(filename2) + return st1 == st2 + + def iterdir(self): + """Iterate over the files in this directory. Does not yield any + result for the special paths '.' and '..'. + """ + if self._closed: + self._raise_closed() + for name in self._accessor.listdir(self): + if name in ('.', '..'): + # Yielding a path object for these makes little sense + continue + yield self._make_child_relpath(name) + if self._closed: + self._raise_closed() + + def glob(self, pattern): + """Iterate over this subtree and yield all existing files (of any + kind, including directories) matching the given pattern. + """ + if not pattern: + raise ValueError("Unacceptable pattern: {0!r}".format(pattern)) + pattern = self._flavour.casefold(pattern) + drv, root, pattern_parts = self._flavour.parse_parts((pattern,)) + if drv or root: + raise NotImplementedError("Non-relative patterns are unsupported") + selector = _make_selector(tuple(pattern_parts)) + for p in selector.select_from(self): + yield p + + def rglob(self, pattern): + """Recursively yield all existing files (of any kind, including + directories) matching the given pattern, anywhere in this subtree. + """ + pattern = self._flavour.casefold(pattern) + drv, root, pattern_parts = self._flavour.parse_parts((pattern,)) + if drv or root: + raise NotImplementedError("Non-relative patterns are unsupported") + selector = _make_selector(("**",) + tuple(pattern_parts)) + for p in selector.select_from(self): + yield p + + def absolute(self): + """Return an absolute version of this path. This function works + even if the path doesn't point to anything. + + No normalization is done, i.e. all '.' and '..' will be kept along. + Use resolve() to get the canonical path to a file. + """ + # XXX untested yet! 
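A short sketch of the glob()/rglob() methods defined above; the directory and patterns are hypothetical examples, not values used by the patch:

    from pathlib2 import Path

    downloads = Path('/tmp/medusa-downloads')    # hypothetical directory
    mkvs = list(downloads.glob('*.mkv'))         # non-recursive match in this directory
    nfos = list(downloads.rglob('*.nfo'))        # recursive match anywhere below it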
+ if self._closed: + self._raise_closed() + if self.is_absolute(): + return self + # FIXME this must defer to the specific flavour (and, under Windows, + # use nt._getfullpathname()) + obj = self._from_parts([os.getcwd()] + self._parts, init=False) + obj._init(template=self) + return obj + + def resolve(self, strict=False): + """ + Make the path absolute, resolving all symlinks on the way and also + normalizing it (for example turning slashes into backslashes under + Windows). + """ + if self._closed: + self._raise_closed() + s = self._flavour.resolve(self, strict=strict) + if s is None: + # No symlink resolution => for consistency, raise an error if + # the path doesn't exist or is forbidden + self.stat() + s = str(self.absolute()) + # Now we have no symlinks in the path, it's safe to normalize it. + normed = self._flavour.pathmod.normpath(s) + obj = self._from_parts((normed,), init=False) + obj._init(template=self) + return obj + + def stat(self): + """ + Return the result of the stat() system call on this path, like + os.stat() does. + """ + return self._accessor.stat(self) + + def owner(self): + """ + Return the login name of the file owner. + """ + import pwd + return pwd.getpwuid(self.stat().st_uid).pw_name + + def group(self): + """ + Return the group name of the file gid. + """ + import grp + return grp.getgrgid(self.stat().st_gid).gr_name + + def open(self, mode='r', buffering=-1, encoding=None, + errors=None, newline=None): + """ + Open the file pointed by this path and return a file object, as + the built-in open() function does. + """ + if self._closed: + self._raise_closed() + if sys.version_info >= (3, 3): + return io.open( + str(self), mode, buffering, encoding, errors, newline, + opener=self._opener) + else: + return io.open(str(self), mode, buffering, + encoding, errors, newline) + + def read_bytes(self): + """ + Open the file in bytes mode, read it, and close the file. + """ + with self.open(mode='rb') as f: + return f.read() + + def read_text(self, encoding=None, errors=None): + """ + Open the file in text mode, read it, and close the file. + """ + with self.open(mode='r', encoding=encoding, errors=errors) as f: + return f.read() + + def write_bytes(self, data): + """ + Open the file in bytes mode, write to it, and close the file. + """ + if not isinstance(data, six.binary_type): + raise TypeError( + 'data must be %s, not %s' % + (six.binary_type.__class__.__name__, data.__class__.__name__)) + with self.open(mode='wb') as f: + return f.write(data) + + def write_text(self, data, encoding=None, errors=None): + """ + Open the file in text mode, write to it, and close the file. + """ + if not isinstance(data, six.text_type): + raise TypeError( + 'data must be %s, not %s' % + (six.text_type.__class__.__name__, data.__class__.__name__)) + with self.open(mode='w', encoding=encoding, errors=errors) as f: + return f.write(data) + + def touch(self, mode=0o666, exist_ok=True): + """ + Create this file with the given access mode, if it doesn't exist. + """ + if self._closed: + self._raise_closed() + if exist_ok: + # First try to bump modification time + # Implementation note: GNU touch uses the UTIME_NOW option of + # the utimensat() / futimens() functions. 
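A minimal sketch of the convenience I/O helpers above (write_text/read_text/read_bytes); the file path and contents are hypothetical:

    from pathlib2 import Path

    note = Path('/tmp/medusa-note.txt')          # hypothetical file
    note.write_text(u'post-processing scratch file')
    assert note.read_text() == u'post-processing scratch file'
    assert note.read_bytes().startswith(b'post')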
+ try: + self._accessor.utime(self, None) + except OSError: + # Avoid exception chaining + pass + else: + return + flags = os.O_CREAT | os.O_WRONLY + if not exist_ok: + flags |= os.O_EXCL + fd = self._raw_open(flags, mode) + os.close(fd) + + def mkdir(self, mode=0o777, parents=False, exist_ok=False): + + def helper(exc): + if not exist_ok or not self.is_dir(): + raise exc + + if self._closed: + self._raise_closed() + if not parents: + _try_except_fileexistserror( + lambda: self._accessor.mkdir(self, mode), + helper) + else: + try: + _try_except_fileexistserror( + lambda: self._accessor.mkdir(self, mode), + helper) + except OSError as e: + if e.errno != ENOENT: + raise + self.parent.mkdir(parents=True) + self._accessor.mkdir(self, mode) + + def chmod(self, mode): + """ + Change the permissions of the path, like os.chmod(). + """ + if self._closed: + self._raise_closed() + self._accessor.chmod(self, mode) + + def lchmod(self, mode): + """ + Like chmod(), except if the path points to a symlink, the symlink's + permissions are changed, rather than its target's. + """ + if self._closed: + self._raise_closed() + self._accessor.lchmod(self, mode) + + def unlink(self): + """ + Remove this file or link. + If the path is a directory, use rmdir() instead. + """ + if self._closed: + self._raise_closed() + self._accessor.unlink(self) + + def rmdir(self): + """ + Remove this directory. The directory must be empty. + """ + if self._closed: + self._raise_closed() + self._accessor.rmdir(self) + + def lstat(self): + """ + Like stat(), except if the path points to a symlink, the symlink's + status information is returned, rather than its target's. + """ + if self._closed: + self._raise_closed() + return self._accessor.lstat(self) + + def rename(self, target): + """ + Rename this path to the given path. + """ + if self._closed: + self._raise_closed() + self._accessor.rename(self, target) + + def replace(self, target): + """ + Rename this path to the given path, clobbering the existing + destination if it exists. + """ + if sys.version_info < (3, 3): + raise NotImplementedError("replace() is only available " + "with Python 3.3 and later") + if self._closed: + self._raise_closed() + self._accessor.replace(self, target) + + def symlink_to(self, target, target_is_directory=False): + """ + Make this path a symlink pointing to the given path. + Note the order of arguments (self, target) is the reverse of + os.symlink's. + """ + if self._closed: + self._raise_closed() + self._accessor.symlink(target, self, target_is_directory) + + # Convenience functions for querying the stat results + + def exists(self): + """ + Whether this path exists. + """ + try: + self.stat() + except OSError as e: + if e.errno not in (ENOENT, ENOTDIR): + raise + return False + return True + + def is_dir(self): + """ + Whether this path is a directory. + """ + try: + return S_ISDIR(self.stat().st_mode) + except OSError as e: + if e.errno not in (ENOENT, ENOTDIR): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + def is_file(self): + """ + Whether this path is a regular file (also True for symlinks pointing + to regular files). + """ + try: + return S_ISREG(self.stat().st_mode) + except OSError as e: + if e.errno not in (ENOENT, ENOTDIR): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + def is_symlink(self): + """ + Whether this path is a symbolic link. 
+ """ + try: + return S_ISLNK(self.lstat().st_mode) + except OSError as e: + if e.errno not in (ENOENT, ENOTDIR): + raise + # Path doesn't exist + return False + + def is_block_device(self): + """ + Whether this path is a block device. + """ + try: + return S_ISBLK(self.stat().st_mode) + except OSError as e: + if e.errno not in (ENOENT, ENOTDIR): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + def is_char_device(self): + """ + Whether this path is a character device. + """ + try: + return S_ISCHR(self.stat().st_mode) + except OSError as e: + if e.errno not in (ENOENT, ENOTDIR): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + def is_fifo(self): + """ + Whether this path is a FIFO. + """ + try: + return S_ISFIFO(self.stat().st_mode) + except OSError as e: + if e.errno not in (ENOENT, ENOTDIR): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + def is_socket(self): + """ + Whether this path is a socket. + """ + try: + return S_ISSOCK(self.stat().st_mode) + except OSError as e: + if e.errno not in (ENOENT, ENOTDIR): + raise + # Path doesn't exist or is a broken symlink + # (see https://bitbucket.org/pitrou/pathlib/issue/12/) + return False + + def expanduser(self): + """ Return a new path with expanded ~ and ~user constructs + (as returned by os.path.expanduser) + """ + if (not (self._drv or self._root) + and self._parts and self._parts[0][:1] == '~'): + homedir = self._flavour.gethomedir(self._parts[0][1:]) + return self._from_parts([homedir] + self._parts[1:]) + + return self + + +class PosixPath(Path, PurePosixPath): + __slots__ = () + + +class WindowsPath(Path, PureWindowsPath): + __slots__ = () + + def owner(self): + raise NotImplementedError("Path.owner() is unsupported on this system") + + def group(self): + raise NotImplementedError("Path.group() is unsupported on this system") diff --git a/lib/rarfile.py b/lib/rarfile.py index 25b61196a6..78148c1916 100644 --- a/lib/rarfile.py +++ b/lib/rarfile.py @@ -54,74 +54,127 @@ # Set to full path of unrar.exe if it is not in PATH rarfile.UNRAR_TOOL = "unrar" - # Set to 0 if you don't look at comments and want to - # avoid wasting time for parsing them - rarfile.NEED_COMMENTS = 1 - - # Set up to 1 if you don't want to deal with decoding comments - # from unknown encoding. rarfile will try couple of common - # encodings in sequence. - rarfile.UNICODE_COMMENTS = 0 - - # Set to 1 if you prefer timestamps to be datetime objects - # instead tuples - rarfile.USE_DATETIME = 0 - - # Set to '/' to be more compatible with zipfile - rarfile.PATH_SEP = '\\' + # Set to '\\' to be more compatible with old rarfile + rarfile.PATH_SEP = '/' For more details, refer to source. 
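A small configuration sketch matching the module-level settings documented above (the binary path is hypothetical):

    import rarfile

    # Point the module at a specific unrar binary when it is not on PATH.
    rarfile.UNRAR_TOOL = '/usr/local/bin/unrar'
    # rarfile 3.0 uses '/' as the in-archive path separator by default;
    # set it back to '\\' only if old-style paths are needed.
    rarfile.PATH_SEP = '/'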
""" -__version__ = '2.8' - -# export only interesting items -__all__ = ['is_rarfile', 'RarInfo', 'RarFile', 'RarExtFile'] +from __future__ import division, print_function ## ## Imports and compat - support both Python 2.x and 3.x ## -import sys, os, struct, errno +import sys +import os +import errno +import struct + from struct import pack, unpack, Struct -from binascii import crc32 +from binascii import crc32, hexlify from tempfile import mkstemp from subprocess import Popen, PIPE, STDOUT -from datetime import datetime from io import RawIOBase -from hashlib import sha1 +from hashlib import sha1, sha256 +from hmac import HMAC +from datetime import datetime, timedelta, tzinfo + +# fixed offset timezone, for UTC +try: + from datetime import timezone +except ImportError: + class timezone(tzinfo): + """Compat timezone.""" + __slots__ = ('_ofs', '_name') + _DST = timedelta(0) + + def __init__(self, offset, name): + super(timezone, self).__init__() + self._ofs, self._name = offset, name + + def utcoffset(self, dt): + return self._ofs + + def tzname(self, dt): + return self._name + + def dst(self, dt): + return self._DST # only needed for encryped headers try: try: from cryptography.hazmat.primitives.ciphers import algorithms, modes, Cipher from cryptography.hazmat.backends import default_backend + from cryptography.hazmat.primitives import hashes + from cryptography.hazmat.primitives.kdf import pbkdf2 + class AES_CBC_Decrypt(object): - block_size = 16 + """Decrypt API""" def __init__(self, key, iv): ciph = Cipher(algorithms.AES(key), modes.CBC(iv), default_backend()) - self.dec = ciph.decryptor() - def decrypt(self, data): - return self.dec.update(data) + self.decrypt = ciph.decryptor().update + + def pbkdf2_sha256(password, salt, iters): + """PBKDF2 with HMAC-SHA256""" + ctx = pbkdf2.PBKDF2HMAC(hashes.SHA256(), 32, salt, iters, default_backend()) + return ctx.derive(password) + except ImportError: from Crypto.Cipher import AES + from Crypto.Protocol import KDF + class AES_CBC_Decrypt(object): - block_size = 16 + """Decrypt API""" def __init__(self, key, iv): - self.dec = AES.new(key, AES.MODE_CBC, iv) - def decrypt(self, data): - return self.dec.decrypt(data) + self.decrypt = AES.new(key, AES.MODE_CBC, iv).decrypt + + def pbkdf2_sha256(password, salt, iters): + """PBKDF2 with HMAC-SHA256""" + return KDF.PBKDF2(password, salt, 32, iters, hmac_sha256) + _have_crypto = 1 except ImportError: _have_crypto = 0 +try: + try: + from hashlib import blake2s + _have_blake2 = True + except ImportError: + from pyblake2 import blake2s + _have_blake2 = True +except ImportError: + _have_blake2 = False + # compat with 2.x if sys.hexversion < 0x3000000: - # prefer 3.x behaviour - range = xrange -else: + def rar_crc32(data, prev=0): + """CRC32 with unsigned values. + """ + if (prev > 0) and (prev & 0x80000000): + prev -= (1 << 32) + res = crc32(data, prev) + if res < 0: + res += (1 << 32) + return res + tohex = hexlify + _byte_code = ord +else: # pragma: no cover + def tohex(data): + """Return hex string.""" + return hexlify(data).decode('ascii') + rar_crc32 = crc32 unicode = str + _byte_code = int # noqa + + +__version__ = '3.0' + +# export only interesting items +__all__ = ['is_rarfile', 'RarInfo', 'RarFile', 'RarExtFile'] ## ## Module configuration. Can be tuned after importing. @@ -166,36 +219,27 @@ def decrypt(self, data): USE_EXTRACT_HACK = 1 #: limit the filesize for tmp archive usage -HACK_SIZE_LIMIT = 20*1024*1024 - -#: whether to parse file/archive comments. 
-NEED_COMMENTS = 1 - -#: whether to convert comments to unicode strings -UNICODE_COMMENTS = 0 - -#: Convert RAR time tuple into datetime() object -USE_DATETIME = 0 +HACK_SIZE_LIMIT = 20 * 1024 * 1024 #: Separator for path name components. RAR internally uses '\\'. #: Use '/' to be similar with zipfile. -PATH_SEP = '\\' +PATH_SEP = '/' ## ## rar constants ## # block types -RAR_BLOCK_MARK = 0x72 # r -RAR_BLOCK_MAIN = 0x73 # s -RAR_BLOCK_FILE = 0x74 # t -RAR_BLOCK_OLD_COMMENT = 0x75 # u -RAR_BLOCK_OLD_EXTRA = 0x76 # v -RAR_BLOCK_OLD_SUB = 0x77 # w -RAR_BLOCK_OLD_RECOVERY = 0x78 # x -RAR_BLOCK_OLD_AUTH = 0x79 # y -RAR_BLOCK_SUB = 0x7a # z -RAR_BLOCK_ENDARC = 0x7b # { +RAR_BLOCK_MARK = 0x72 # r +RAR_BLOCK_MAIN = 0x73 # s +RAR_BLOCK_FILE = 0x74 # t +RAR_BLOCK_OLD_COMMENT = 0x75 # u +RAR_BLOCK_OLD_EXTRA = 0x76 # v +RAR_BLOCK_OLD_SUB = 0x77 # w +RAR_BLOCK_OLD_RECOVERY = 0x78 # x +RAR_BLOCK_OLD_AUTH = 0x79 # y +RAR_BLOCK_SUB = 0x7a # z +RAR_BLOCK_ENDARC = 0x7b # { # flags for RAR_BLOCK_MAIN RAR_MAIN_VOLUME = 0x0001 @@ -257,196 +301,335 @@ def decrypt(self, data): RAR_M4 = 0x34 RAR_M5 = 0x35 +# +# RAR5 constants +# + +RAR5_BLOCK_MAIN = 1 +RAR5_BLOCK_FILE = 2 +RAR5_BLOCK_SERVICE = 3 +RAR5_BLOCK_ENCRYPTION = 4 +RAR5_BLOCK_ENDARC = 5 + +RAR5_BLOCK_FLAG_EXTRA_DATA = 0x01 +RAR5_BLOCK_FLAG_DATA_AREA = 0x02 +RAR5_BLOCK_FLAG_SKIP_IF_UNKNOWN = 0x04 +RAR5_BLOCK_FLAG_SPLIT_BEFORE = 0x08 +RAR5_BLOCK_FLAG_SPLIT_AFTER = 0x10 +RAR5_BLOCK_FLAG_DEPENDS_PREV = 0x20 +RAR5_BLOCK_FLAG_KEEP_WITH_PARENT = 0x40 + +RAR5_MAIN_FLAG_ISVOL = 0x01 +RAR5_MAIN_FLAG_HAS_VOLNR = 0x02 +RAR5_MAIN_FLAG_SOLID = 0x04 +RAR5_MAIN_FLAG_RECOVERY = 0x08 +RAR5_MAIN_FLAG_LOCKED = 0x10 + +RAR5_FILE_FLAG_ISDIR = 0x01 +RAR5_FILE_FLAG_HAS_MTIME = 0x02 +RAR5_FILE_FLAG_HAS_CRC32 = 0x04 +RAR5_FILE_FLAG_UNKNOWN_SIZE = 0x08 + +RAR5_COMPR_SOLID = 0x40 + +RAR5_ENC_FLAG_HAS_CHECKVAL = 0x01 + +RAR5_ENDARC_FLAG_NEXT_VOL = 0x01 + +RAR5_XFILE_ENCRYPTION = 1 +RAR5_XFILE_HASH = 2 +RAR5_XFILE_TIME = 3 +RAR5_XFILE_VERSION = 4 +RAR5_XFILE_REDIR = 5 +RAR5_XFILE_OWNER = 6 +RAR5_XFILE_SERVICE = 7 + +RAR5_XTIME_UNIXTIME = 0x01 +RAR5_XTIME_HAS_MTIME = 0x02 +RAR5_XTIME_HAS_CTIME = 0x04 +RAR5_XTIME_HAS_ATIME = 0x08 + +RAR5_XENC_CIPHER_AES256 = 0 + +RAR5_XENC_CHECKVAL = 0x01 +RAR5_XENC_TWEAKED = 0x02 + +RAR5_XHASH_BLAKE2SP = 0 + +RAR5_XREDIR_UNIX_SYMLINK = 1 +RAR5_XREDIR_WINDOWS_SYMLINK = 2 +RAR5_XREDIR_WINDOWS_JUNCTION = 3 +RAR5_XREDIR_HARD_LINK = 4 +RAR5_XREDIR_FILE_COPY = 5 + +RAR5_XREDIR_ISDIR = 0x01 + +RAR5_XOWNER_UNAME = 0x01 +RAR5_XOWNER_GNAME = 0x02 +RAR5_XOWNER_UID = 0x04 +RAR5_XOWNER_GID = 0x08 + +RAR5_OS_WINDOWS = 0 +RAR5_OS_UNIX = 1 + ## ## internal constants ## RAR_ID = b"Rar!\x1a\x07\x00" -ZERO = b"\0" -EMPTY = b"" - -S_BLK_HDR = Struct(' 0 + class Error(Exception): """Base class for rarfile errors.""" + class BadRarFile(Error): """Incorrect data in archive.""" + class NotRarFile(Error): """The file is not RAR archive.""" + class BadRarName(Error): """Cannot guess multipart name components.""" + class NoRarEntry(Error): """File not found in RAR""" + class PasswordRequired(Error): """File requires password""" + class NeedFirstVolume(Error): """Need to start from first volume.""" + class NoCrypto(Error): """Cannot parse encrypted headers - no crypto available.""" + class RarExecError(Error): """Problem reported by unrar/rar.""" + class RarWarning(RarExecError): """Non-fatal error""" + class RarFatalError(RarExecError): """Fatal error""" + class RarCRCError(RarExecError): """CRC error during unpacking""" + class RarLockedArchiveError(RarExecError): """Must not 
modify locked archive""" + class RarWriteError(RarExecError): """Write error""" + class RarOpenError(RarExecError): """Open error""" + class RarUserError(RarExecError): """User error""" + class RarMemoryError(RarExecError): """Memory error""" + class RarCreateError(RarExecError): """Create error""" + class RarNoFilesError(RarExecError): """No files that match pattern were found""" + class RarUserBreak(RarExecError): """User stop""" + +class RarWrongPassword(RarExecError): + """Incorrect password""" + class RarUnknownError(RarExecError): """Unknown exit code""" + class RarSignalExit(RarExecError): """Unrar exited with signal""" + class RarCannotExec(RarExecError): """Executable not found.""" -def is_rarfile(xfile): - '''Check quickly whether file is rar archive.''' - fd = XFile(xfile) - buf = fd.read(len(RAR_ID)) - fd.close() - return buf == RAR_ID +class RarInfo(object): + r"""An entry in rar archive. + RAR3 extended timestamps are :class:`datetime.datetime` objects without timezone. + RAR5 extended timestamps are :class:`datetime.datetime` objects with UTC timezone. -class RarInfo(object): - r'''An entry in rar archive. + Attributes: - :mod:`zipfile`-compatible fields: - filename File name with relative path. - Default path separator is '\\', to change set rarfile.PATH_SEP. - Always unicode string. + Path separator is '/'. Always unicode string. + date_time - Modification time, tuple of (year, month, day, hour, minute, second). - Or datetime() object if USE_DATETIME is set. + File modification timestamp. As tuple of (year, month, day, hour, minute, second). + RAR5 allows archives where it is missing, it's None then. + file_size Uncompressed size. + compress_size Compressed size. - CRC - CRC-32 of uncompressed file, unsigned int. - comment - File comment. Byte string or None. Use UNICODE_COMMENTS - to get automatic decoding to unicode. - volume - Volume nr, starting from 0. - - RAR-specific fields: compress_type - Compression method: 0x30 - 0x35. + Compression method: one of :data:`RAR_M0` .. :data:`RAR_M5` constants. + extract_version - Minimal Rar version needed for decompressing. + Minimal Rar version needed for decompressing. As (major*10 + minor), + so 2.9 is 29. + + RAR3: 10, 20, 29 + + RAR5 does not have such field in archive, it's simply set to 50. + host_os Host OS type, one of RAR_OS_* constants. + + RAR3: :data:`RAR_OS_WIN32`, :data:`RAR_OS_UNIX`, :data:`RAR_OS_MSDOS`, + :data:`RAR_OS_OS2`, :data:`RAR_OS_BEOS`. + + RAR5: :data:`RAR_OS_WIN32`, :data:`RAR_OS_UNIX`. + mode File attributes. May be either dos-style or unix-style, depending on host_os. - volume_file - Volume file name, where file starts. + mtime - Optional time field: Modification time, with float seconds. - Same as .date_time but with more precision. + File modification time. Same value as :attr:`date_time` + but as :class:`datetime.datetime` object with extended precision. + ctime - Optional time field: creation time, with float seconds. + Optional time field: creation time. As :class:`datetime.datetime` object. + atime - Optional time field: last access time, with float seconds. + Optional time field: last access time. As :class:`datetime.datetime` object. + arctime - Optional time field: archival time, with float seconds. - - Internal fields: - - type - One of RAR_BLOCK_* types. Only entries with type==RAR_BLOCK_FILE are shown in .infolist(). - flags - For files, RAR_FILE_* bits. 
- ''' - - __slots__ = ( - # zipfile-compatible fields - 'filename', - 'file_size', - 'compress_size', - 'date_time', - 'comment', - 'CRC', - 'volume', - 'orig_filename', # bytes in unknown encoding - - # rar-specific fields - 'extract_version', - 'compress_type', - 'host_os', - 'mode', - 'type', - 'flags', - - # optional extended time fields - # tuple where the sec is float, or datetime(). - 'mtime', # same as .date_time - 'ctime', - 'atime', - 'arctime', - - # RAR internals - 'name_size', - 'header_size', - 'header_crc', - 'file_offset', - 'add_size', - 'header_data', - 'header_base', - 'header_offset', - 'salt', - 'volume_file', - ) + Optional time field: archival time. As :class:`datetime.datetime` object. + (RAR3-only) + + CRC + CRC-32 of uncompressed file, unsigned int. + + RAR5: may be None. + + blake2sp_hash + Blake2SP hash over decompressed data. (RAR5-only) + + comment + Optional file comment field. Unicode string. (RAR3-only) + + file_redir + If not None, file is link of some sort. Contains tuple of (type, flags, target). + (RAR5-only) + + Type is one of constants: + + :data:`RAR5_XREDIR_UNIX_SYMLINK` + unix symlink to target. + :data:`RAR5_XREDIR_WINDOWS_SYMLINK` + windows symlink to target. + :data:`RAR5_XREDIR_WINDOWS_JUNCTION` + windows junction. + :data:`RAR5_XREDIR_HARD_LINK` + hard link to target. + :data:`RAR5_XREDIR_FILE_COPY` + current file is copy of another archive entry. + + Flags may contain :data:`RAR5_XREDIR_ISDIR` bit. + + volume + Volume nr, starting from 0. + + volume_file + Volume file name, where file starts. + + """ + + # zipfile-compatible fields + filename = None + file_size = None + compress_size = None + date_time = None + comment = None + CRC = None + volume = None + orig_filename = None + + # optional extended time fields, datetime() objects. + mtime = None + ctime = None + atime = None + + extract_version = None + mode = None + host_os = None + compress_type = None + + # rar3-only fields + comment = None + arctime = None + + # rar5-only fields + blake2sp_hash = None + file_redir = None + + # internal fields + flags = 0 + type = None def isdir(self): - '''Returns True if the entry is a directory.''' + """Returns True if entry is a directory. + """ if self.type == RAR_BLOCK_FILE: return (self.flags & RAR_FILE_DIRECTORY) == RAR_FILE_DIRECTORY return False def needs_password(self): - return (self.flags & RAR_FILE_PASSWORD) > 0 + """Returns True if data is stored password-protected. + """ + if self.type == RAR_BLOCK_FILE: + return (self.flags & RAR_FILE_PASSWORD) > 0 + return False class RarFile(object): - '''Parse RAR structure, provide access to files in archive. - ''' + """Parse RAR structure, provide access to files in archive. + """ - #: Archive comment. Byte string or None. Use :data:`UNICODE_COMMENTS` - #: to get automatic decoding to unicode. + #: Archive comment. Unicode string or None. comment = None def __init__(self, rarfile, mode="r", charset=None, info_callback=None, - crc_check = True, errors = "stop"): + crc_check=True, errors="stop"): """Open and parse a RAR archive. - + Parameters: rarfile @@ -463,18 +646,12 @@ def __init__(self, rarfile, mode="r", charset=None, info_callback=None, Either "stop" to quietly stop parsing on errors, or "strict" to raise errors. Default is "stop". 
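A hedged sketch of opening an archive and walking its entries with the RarFile/RarInfo API shown above; the archive name and password are hypothetical:

    import rarfile

    with rarfile.RarFile('release.rar') as rf:               # hypothetical archive
        if rf.needs_password():
            rf.setpassword('secret')                         # hypothetical password
        names = [info.filename for info in rf.infolist() if not info.isdir()]
        total = sum(info.file_size for info in rf.infolist() if not info.isdir())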
""" - self.rarfile = rarfile - self.comment = None + self._rarfile = rarfile self._charset = charset or DEFAULT_CHARSET self._info_callback = info_callback - - self._info_list = [] - self._info_map = {} - self._parse_error = None - self._needs_password = False - self._password = None self._crc_check = crc_check - self._vol_list = [] + self._password = None + self._file_parser = None if errors == "stop": self._strict = False @@ -483,69 +660,62 @@ def __init__(self, rarfile, mode="r", charset=None, info_callback=None, else: raise ValueError("Invalid value for 'errors' parameter.") - self._main = None - if mode != "r": raise NotImplementedError("RarFile supports only mode=r") self._parse() def __enter__(self): + """Open context.""" return self - def __exit__(self, type, value, traceback): + def __exit__(self, typ, value, traceback): + """Exit context""" self.close() def setpassword(self, password): - '''Sets the password to use when extracting.''' + """Sets the password to use when extracting. + """ self._password = password - if not self._main: + if self._file_parser: + if self._file_parser.has_header_encryption(): + self._file_parser = None + if not self._file_parser: self._parse() + else: + self._file_parser.setpassword(self._password) def needs_password(self): - '''Returns True if any archive entries require password for extraction.''' - return self._needs_password + """Returns True if any archive entries require password for extraction. + """ + return self._file_parser.needs_password() def namelist(self): - '''Return list of filenames in archive.''' + """Return list of filenames in archive. + """ return [f.filename for f in self.infolist()] def infolist(self): - '''Return RarInfo objects for all files/directories in archive.''' - return self._info_list + """Return RarInfo objects for all files/directories in archive. + """ + return self._file_parser.infolist() def volumelist(self): - '''Returns filenames of archive volumes. + """Returns filenames of archive volumes. In case of single-volume archive, the list contains just the name of main archive file. - ''' - return self._vol_list + """ + return self._file_parser.volumelist() def getinfo(self, fname): - '''Return RarInfo for file.''' - - if isinstance(fname, RarInfo): - return fname - - # accept both ways here - if PATH_SEP == '/': - fname2 = fname.replace("\\", "/") - else: - fname2 = fname.replace("/", "\\") + """Return RarInfo for file. + """ + return self._file_parser.getinfo(fname) - try: - return self._info_map[fname] - except KeyError: - try: - return self._info_map[fname2] - except KeyError: - raise NoRarEntry("No such file: "+fname) + def open(self, fname, mode='r', psw=None): + """Returns file-like object (:class:`RarExtFile`) from where the data can be read. - def open(self, fname, mode = 'r', psw = None): - '''Returns file-like object (:class:`RarExtFile`), - from where the data can be read. - The object implements :class:`io.RawIOBase` interface, so it can be further wrapped with :class:`io.BufferedReader` and :class:`io.TextIOWrapper`. @@ -565,7 +735,7 @@ def open(self, fname, mode = 'r', psw = None): must be 'r' psw password to use for extracting. 
- ''' + """ if mode != 'r': raise NotImplementedError("RarFile.open() supports only mode=r") @@ -575,9 +745,6 @@ def open(self, fname, mode = 'r', psw = None): if inf.isdir(): raise TypeError("Directory does not have any data: " + inf.filename) - if inf.flags & RAR_FILE_SPLIT_BEFORE: - raise NeedFirstVolume("Partial file, please start from first volume: " + inf.filename) - # check password if inf.needs_password(): psw = psw or self._password @@ -586,34 +753,11 @@ def open(self, fname, mode = 'r', psw = None): else: psw = None - # is temp write usable? - use_hack = 1 - if not self._main: - use_hack = 0 - elif self._main.flags & (RAR_MAIN_SOLID | RAR_MAIN_PASSWORD): - use_hack = 0 - elif inf.flags & (RAR_FILE_SPLIT_BEFORE | RAR_FILE_SPLIT_AFTER): - use_hack = 0 - elif is_filelike(self.rarfile): - pass - elif inf.file_size > HACK_SIZE_LIMIT: - use_hack = 0 - elif not USE_EXTRACT_HACK: - use_hack = 0 - - # now extract - if inf.compress_type == RAR_M0 and (inf.flags & RAR_FILE_PASSWORD) == 0: - return self._open_clear(inf) - elif use_hack: - return self._open_hack(inf, psw) - elif is_filelike(self.rarfile): - return self._open_unrar_membuf(self.rarfile, inf, psw) - else: - return self._open_unrar(self.rarfile, inf, psw) + return self._file_parser.open(inf, psw) - def read(self, fname, psw = None): + def read(self, fname, psw=None): """Return uncompressed data for archive entry. - + For longer files using :meth:`RarFile.open` may be better idea. Parameters: @@ -624,11 +768,8 @@ def read(self, fname, psw = None): password to use for extracting. """ - f = self.open(fname, 'r', psw) - try: + with self.open(fname, 'r', psw) as f: return f.read() - finally: - f.close() def close(self): """Release open resources.""" @@ -641,7 +782,7 @@ def printdir(self): def extract(self, member, path=None, pwd=None): """Extract single file into current directory. - + Parameters: member @@ -659,7 +800,7 @@ def extract(self, member, path=None, pwd=None): def extractall(self, path=None, members=None, pwd=None): """Extract all files into current directory. - + Parameters: path @@ -684,77 +825,149 @@ def testrar(self): cmd = [UNRAR_TOOL] + list(TEST_ARGS) add_password_arg(cmd, self._password) cmd.append('--') - - if is_filelike(self.rarfile): - tmpname = membuf_tempfile(self.rarfile) - cmd.append(tmpname) - else: - tmpname = None - cmd.append(self.rarfile) - - try: + with XTempFile(self._rarfile) as rarfile: + cmd.append(rarfile) p = custom_popen(cmd) output = p.communicate()[0] check_returncode(p, output) - finally: - if tmpname: - os.unlink(tmpname) def strerror(self): - """Return error string if parsing failed, - or None if no problems. + """Return error string if parsing failed or None if no problems. 
""" - return self._parse_error + if not self._file_parser: + return "Not a RAR file" + return self._file_parser.strerror() ## ## private methods ## - def _set_error(self, msg, *args): - if args: - msg = msg % args - self._parse_error = msg - if self._strict: - raise BadRarFile(msg) + def _parse(self): + ver = _get_rar_version(self._rarfile) + if ver == 3: + p3 = RAR3Parser(self._rarfile, self._password, self._crc_check, + self._charset, self._strict, self._info_callback) + self._file_parser = p3 # noqa + elif ver == 5: + p5 = RAR5Parser(self._rarfile, self._password, self._crc_check, + self._charset, self._strict, self._info_callback) + self._file_parser = p5 # noqa + else: + raise BadRarFile("Not a RAR file") - # store entry - def _process_entry(self, item): - if item.type == RAR_BLOCK_FILE: - # use only first part - if (item.flags & RAR_FILE_SPLIT_BEFORE) == 0: - self._info_map[item.filename] = item - self._info_list.append(item) - # remember if any items require password - if item.needs_password(): - self._needs_password = True - elif len(self._info_list) > 0: - # final crc is in last block - old = self._info_list[-1] - old.CRC = item.CRC - old.compress_size += item.compress_size + self._file_parser.parse() + self.comment = self._file_parser.comment - # parse new-style comment - if item.type == RAR_BLOCK_SUB and item.filename == 'CMT': - if not NEED_COMMENTS: - pass - elif item.flags & (RAR_FILE_SPLIT_BEFORE | RAR_FILE_SPLIT_AFTER): - pass - elif item.flags & RAR_FILE_SOLID: - # file comment - cmt = self._read_comment_v3(item, self._password) - if len(self._info_list) > 0: - old = self._info_list[-1] - old.comment = cmt - else: - # archive comment - cmt = self._read_comment_v3(item, self._password) - self.comment = cmt + # call unrar to extract a file + def _extract(self, fnlist, path=None, psw=None): + cmd = [UNRAR_TOOL] + list(EXTRACT_ARGS) - if self._info_callback: - self._info_callback(item) + # pasoword + psw = psw or self._password + add_password_arg(cmd, psw) + cmd.append('--') + + # rar file + with XTempFile(self._rarfile) as rarfn: + cmd.append(rarfn) + + # file list + for fn in fnlist: + if os.sep != PATH_SEP: + fn = fn.replace(PATH_SEP, os.sep) + cmd.append(fn) + + # destination path + if path is not None: + cmd.append(path + os.sep) + + # call + p = custom_popen(cmd) + output = p.communicate()[0] + check_returncode(p, output) + +# +# File format parsing +# + +class CommonParser(object): + """Shared parser parts.""" + _main = None + _hdrenc_main = None + _needs_password = False + _fd = None + _expect_sig = None + _parse_error = None + _password = None + comment = None + + def __init__(self, rarfile, password, crc_check, charset, strict, info_cb): + self._rarfile = rarfile + self._password = password + self._crc_check = crc_check + self._charset = charset + self._strict = strict + self._info_callback = info_cb + self._info_list = [] + self._info_map = {} + self._vol_list = [] + + def has_header_encryption(self): + """Returns True if headers are encrypted + """ + if self._hdrenc_main: + return True + if self._main: + if self._main.flags & RAR_MAIN_PASSWORD: + return True + return False + + def setpassword(self, psw): + """Set cached password.""" + self._password = psw + + def volumelist(self): + """Volume files""" + return self._vol_list + + def needs_password(self): + """Is password required""" + return self._needs_password + + def strerror(self): + """Last error""" + return self._parse_error + + def infolist(self): + """List of RarInfo records. 
+ """ + return self._info_list + + def getinfo(self, member): + """Return RarInfo for filename + """ + if isinstance(member, RarInfo): + fname = member.filename + else: + fname = member + + # accept both ways here + if PATH_SEP == '/': + fname2 = fname.replace("\\", "/") + else: + fname2 = fname.replace("/", "\\") + + try: + return self._info_map[fname] + except KeyError: + try: + return self._info_map[fname2] + except KeyError: + raise NoRarEntry("No such file: %s" % fname) # read rar - def _parse(self): + def parse(self): + """Process file.""" self._fd = None try: self._parse_real() @@ -764,19 +977,19 @@ def _parse(self): self._fd = None def _parse_real(self): - fd = XFile(self.rarfile) + fd = XFile(self._rarfile) self._fd = fd - id = fd.read(len(RAR_ID)) - if id != RAR_ID: - if isinstance(self.rarfile, (str, unicode)): - raise NotRarFile("Not a Rar archive: {}".format(self.rarfile)) + sig = fd.read(len(self._expect_sig)) + if sig != self._expect_sig: + if isinstance(self._rarfile, (str, unicode)): + raise NotRarFile("Not a Rar archive: {}".format(self._rarfile)) raise NotRarFile("Not a Rar archive") volume = 0 # first vol (.rar) is 0 - more_vols = 0 - endarc = 0 - volfile = self.rarfile - self._vol_list = [self.rarfile] + more_vols = False + endarc = False + volfile = self._rarfile + self._vol_list = [self._rarfile] while 1: if endarc: h = None # don't read past ENDARC @@ -793,8 +1006,12 @@ def _parse_real(self): self._set_error("Cannot open next volume: %s", volfile) break self._fd = fd - more_vols = 0 - endarc = 0 + sig = fd.read(len(self._expect_sig)) + if sig != self._expect_sig: + self._set_error("Invalid volume sig: %s", volfile) + break + more_vols = False + endarc = False self._vol_list.append(volfile) continue break @@ -811,44 +1028,49 @@ def _parse_real(self): if h.flags & RAR_MAIN_PASSWORD: self._needs_password = True if not self._password: - self._main = None break elif h.type == RAR_BLOCK_ENDARC: - more_vols = h.flags & RAR_ENDARC_NEXT_VOLUME - endarc = 1 + more_vols = (h.flags & RAR_ENDARC_NEXT_VOLUME) > 0 + endarc = True elif h.type == RAR_BLOCK_FILE: # RAR 2.x does not write RAR_BLOCK_ENDARC if h.flags & RAR_FILE_SPLIT_AFTER: - more_vols = 1 + more_vols = True # RAR 2.x does not set RAR_MAIN_FIRSTVOLUME if volume == 0 and h.flags & RAR_FILE_SPLIT_BEFORE: raise NeedFirstVolume("Need to start from first volume") + if h.needs_password(): + self._needs_password = True + # store it - self._process_entry(h) + self.process_entry(fd, h) + + if self._info_callback: + self._info_callback(h) # go to next header if h.add_size > 0: - fd.seek(h.file_offset + h.add_size, 0) + fd.seek(h.data_offset + h.add_size, 0) + + def process_entry(self, fd, item): + """Examine item, add into lookup cache.""" + raise NotImplementedError() - # AES encrypted headers - _last_aes_key = (None, None, None) # (salt, key, iv) def _decrypt_header(self, fd): - if not _have_crypto: - raise NoCrypto('Cannot parse encrypted headers - no crypto') - salt = fd.read(8) - if self._last_aes_key[0] == salt: - key, iv = self._last_aes_key[1:] - else: - key, iv = rar3_s2k(self._password, salt) - self._last_aes_key = (salt, key, iv) - return HeaderDecrypt(fd, key, iv) + raise NotImplementedError('_decrypt_header') + + def _parse_block_header(self, fd): + raise NotImplementedError('_parse_block_header') + + def _open_hack(self, inf, psw): + raise NotImplementedError('_open_hack') # read single header def _parse_header(self, fd): try: # handle encrypted headers - if self._main and self._main.flags & RAR_MAIN_PASSWORD: + if 
(self._main and self._main.flags & RAR_MAIN_PASSWORD) or self._hdrenc_main: if not self._password: return fd = self._decrypt_header(fd) @@ -859,11 +1081,168 @@ def _parse_header(self, fd): self._set_error('Broken header in RAR file') return None + # given current vol name, construct next one + def _next_volname(self, volfile): + if is_filelike(volfile): + raise IOError("Working on single FD") + if self._main.flags & RAR_MAIN_NEWNUMBERING: + return _next_newvol(volfile) + return _next_oldvol(volfile) + + def _set_error(self, msg, *args): + if args: + msg = msg % args + self._parse_error = msg + if self._strict: + raise BadRarFile(msg) + + def open(self, inf, psw): + """Return stream object for file data.""" + + if inf.file_redir: + # cannot leave to unrar as it expects copied file to exist + if inf.file_redir[0] in (RAR5_XREDIR_FILE_COPY, RAR5_XREDIR_HARD_LINK): + inf = self.getinfo(inf.file_redir[2]) + if not inf: + raise BadRarFile('cannot find copied file') + + if inf.flags & RAR_FILE_SPLIT_BEFORE: + raise NeedFirstVolume("Partial file, please start from first volume: " + inf.filename) + + # is temp write usable? + use_hack = 1 + if not self._main: + use_hack = 0 + elif self._main._must_disable_hack(): + use_hack = 0 + elif inf._must_disable_hack(): + use_hack = 0 + elif is_filelike(self._rarfile): + pass + elif inf.file_size > HACK_SIZE_LIMIT: + use_hack = 0 + elif not USE_EXTRACT_HACK: + use_hack = 0 + + # now extract + if inf.compress_type == RAR_M0 and (inf.flags & RAR_FILE_PASSWORD) == 0 and inf.file_redir is None: + return self._open_clear(inf) + elif use_hack: + return self._open_hack(inf, psw) + elif is_filelike(self._rarfile): + return self._open_unrar_membuf(self._rarfile, inf, psw) + else: + return self._open_unrar(self._rarfile, inf, psw) + + def _open_clear(self, inf): + return DirectReader(self, inf) + + def _open_hack_core(self, inf, psw, prefix, suffix): + + size = inf.compress_size + inf.header_size + rf = XFile(inf.volume_file, 0) + rf.seek(inf.header_offset) + + tmpfd, tmpname = mkstemp(suffix='.rar') + tmpf = os.fdopen(tmpfd, "wb") + + try: + tmpf.write(prefix) + while size > 0: + if size > BSIZE: + buf = rf.read(BSIZE) + else: + buf = rf.read(size) + if not buf: + raise BadRarFile('read failed: ' + inf.filename) + tmpf.write(buf) + size -= len(buf) + tmpf.write(suffix) + tmpf.close() + rf.close() + except: + rf.close() + tmpf.close() + os.unlink(tmpname) + raise + + return self._open_unrar(tmpname, inf, psw, tmpname) + + # write in-memory archive to temp file - needed for solid archives + def _open_unrar_membuf(self, memfile, inf, psw): + tmpname = membuf_tempfile(memfile) + return self._open_unrar(tmpname, inf, psw, tmpname, force_file=True) + + # extract using unrar + def _open_unrar(self, rarfile, inf, psw=None, tmpfile=None, force_file=False): + cmd = [UNRAR_TOOL] + list(OPEN_ARGS) + add_password_arg(cmd, psw) + cmd.append("--") + cmd.append(rarfile) + + # not giving filename avoids encoding related problems + if not tmpfile or force_file: + fn = inf.filename + if PATH_SEP != os.sep: + fn = fn.replace(PATH_SEP, os.sep) + cmd.append(fn) + + # read from unrar pipe + return PipeReader(self, inf, cmd, tmpfile) + +# +# RAR3 format +# + +class Rar3Info(RarInfo): + """RAR3 specific fields.""" + extract_version = 15 + salt = None + add_size = 0 + header_crc = None + header_size = None + header_offset = None + data_offset = None + _md_class = None + _md_expect = None + + # make sure some rar5 fields are always present + file_redir = None + blake2sp_hash = None + + def 
_must_disable_hack(self): + if self.type == RAR_BLOCK_FILE: + if self.flags & RAR_FILE_PASSWORD: + return True + elif self.flags & (RAR_FILE_SPLIT_BEFORE | RAR_FILE_SPLIT_AFTER): + return True + elif self.type == RAR_BLOCK_MAIN: + if self.flags & (RAR_MAIN_SOLID | RAR_MAIN_PASSWORD): + return True + return False + + +class RAR3Parser(CommonParser): + """Parse RAR3 file format. + """ + _expect_sig = RAR_ID + _last_aes_key = (None, None, None) # (salt, key, iv) + + def _decrypt_header(self, fd): + if not _have_crypto: + raise NoCrypto('Cannot parse encrypted headers - no crypto') + salt = fd.read(8) + if self._last_aes_key[0] == salt: + key, iv = self._last_aes_key[1:] + else: + key, iv = rar3_s2k(self._password, salt) + self._last_aes_key = (salt, key, iv) + return HeaderDecrypt(fd, key, iv) + # common header def _parse_block_header(self, fd): - h = RarInfo() + h = Rar3Info() h.header_offset = fd.tell() - h.comment = None # read and parse base header buf = fd.read(S_BLK_HDR.size) @@ -871,24 +1250,24 @@ def _parse_block_header(self, fd): return None t = S_BLK_HDR.unpack_from(buf) h.header_crc, h.type, h.flags, h.header_size = t - h.header_base = S_BLK_HDR.size - pos = S_BLK_HDR.size # read full header if h.header_size > S_BLK_HDR.size: - h.header_data = buf + fd.read(h.header_size - S_BLK_HDR.size) + hdata = buf + fd.read(h.header_size - S_BLK_HDR.size) else: - h.header_data = buf - h.file_offset = fd.tell() + hdata = buf + h.data_offset = fd.tell() # unexpected EOF? - if len(h.header_data) != h.header_size: + if len(hdata) != h.header_size: self._set_error('Unexpected EOF when reading header') return None + pos = S_BLK_HDR.size + # block has data assiciated with it? if h.flags & RAR_LONG_BLOCK: - h.add_size = S_LONG.unpack_from(h.header_data, pos)[0] + h.add_size, pos = load_le32(hdata, pos) else: h.add_size = 0 @@ -896,31 +1275,36 @@ def _parse_block_header(self, fd): if h.type == RAR_BLOCK_MARK: return h elif h.type == RAR_BLOCK_MAIN: - h.header_base += 6 + pos += 6 if h.flags & RAR_MAIN_ENCRYPTVER: - h.header_base += 1 + pos += 1 + crc_pos = pos if h.flags & RAR_MAIN_COMMENT: - self._parse_subblocks(h, h.header_base) - self.comment = h.comment + self._parse_subblocks(h, hdata, pos) elif h.type == RAR_BLOCK_FILE: - self._parse_file_header(h, pos) + pos = self._parse_file_header(h, hdata, pos - 4) + crc_pos = pos + if h.flags & RAR_FILE_COMMENT: + pos = self._parse_subblocks(h, hdata, pos) elif h.type == RAR_BLOCK_SUB: - self._parse_file_header(h, pos) - h.header_base = h.header_size + pos = self._parse_file_header(h, hdata, pos - 4) + crc_pos = h.header_size elif h.type == RAR_BLOCK_OLD_AUTH: - h.header_base += 8 + pos += 8 + crc_pos = pos elif h.type == RAR_BLOCK_OLD_EXTRA: - h.header_base += 7 + pos += 7 + crc_pos = pos else: - h.header_base = h.header_size + crc_pos = h.header_size # check crc if h.type == RAR_BLOCK_OLD_SUB: - crcdat = h.header_data[2:] + fd.read(h.add_size) + crcdat = hdata[2:] + fd.read(h.add_size) else: - crcdat = h.header_data[2:h.header_base] + crcdat = hdata[2:crc_pos] - calc_crc = crc32(crcdat) & 0xFFFF + calc_crc = rar_crc32(crcdat) & 0xFFFF # return good header if h.header_crc == calc_crc: @@ -928,39 +1312,42 @@ def _parse_block_header(self, fd): # header parsing failed. 
self._set_error('Header CRC error (%02x): exp=%x got=%x (xlen = %d)', - h.type, h.header_crc, calc_crc, len(crcdat)) + h.type, h.header_crc, calc_crc, len(crcdat)) # instead panicing, send eof return None # read file-specific header - def _parse_file_header(self, h, pos): - fld = S_FILE_HDR.unpack_from(h.header_data, pos) + def _parse_file_header(self, h, hdata, pos): + fld = S_FILE_HDR.unpack_from(hdata, pos) + pos += S_FILE_HDR.size + h.compress_size = fld[0] h.file_size = fld[1] h.host_os = fld[2] h.CRC = fld[3] h.date_time = parse_dos_time(fld[4]) + h.mtime = to_datetime(h.date_time) h.extract_version = fld[5] h.compress_type = fld[6] - h.name_size = fld[7] + name_size = fld[7] h.mode = fld[8] - pos += S_FILE_HDR.size + + h._md_class = CRC32Context + h._md_expect = h.CRC if h.flags & RAR_FILE_LARGE: - h1 = S_LONG.unpack_from(h.header_data, pos)[0] - h2 = S_LONG.unpack_from(h.header_data, pos + 4)[0] + h1, pos = load_le32(hdata, pos) + h2, pos = load_le32(hdata, pos) h.compress_size |= h1 << 32 h.file_size |= h2 << 32 - pos += 8 h.add_size = h.compress_size - name = h.header_data[pos : pos + h.name_size ] - pos += h.name_size + name, pos = load_bytes(hdata, name_size, pos) if h.flags & RAR_FILE_UNICODE: nul = name.find(ZERO) h.orig_filename = name[:nul] - u = UnicodeFilename(h.orig_filename, name[nul + 1 : ]) + u = UnicodeFilename(h.orig_filename, name[nul + 1:]) h.filename = u.decode() # if parsing failed fall back to simple name @@ -975,278 +1362,534 @@ def _parse_file_header(self, h, pos): h.filename = h.filename.replace('\\', PATH_SEP) if h.flags & RAR_FILE_SALT: - h.salt = h.header_data[pos : pos + 8] - pos += 8 + h.salt, pos = load_bytes(hdata, 8, pos) else: h.salt = None # optional extended time stamps if h.flags & RAR_FILE_EXTTIME: - pos = self._parse_ext_time(h, pos) + pos = _parse_ext_time(h, hdata, pos) else: h.mtime = h.atime = h.ctime = h.arctime = None - # base header end - h.header_base = pos - - if h.flags & RAR_FILE_COMMENT: - self._parse_subblocks(h, pos) - - # convert timestamps - if USE_DATETIME: - h.date_time = to_datetime(h.date_time) - h.mtime = to_datetime(h.mtime) - h.atime = to_datetime(h.atime) - h.ctime = to_datetime(h.ctime) - h.arctime = to_datetime(h.arctime) - - # .mtime is .date_time with more precision - if h.mtime: - if USE_DATETIME: - h.date_time = h.mtime - else: - # keep seconds int - h.date_time = h.mtime[:5] + (int(h.mtime[5]),) - return pos # find old-style comment subblock - def _parse_subblocks(self, h, pos): - hdata = h.header_data + def _parse_subblocks(self, h, hdata, pos): while pos < len(hdata): # ordinary block header t = S_BLK_HDR.unpack_from(hdata, pos) - scrc, stype, sflags, slen = t + ___scrc, stype, sflags, slen = t pos_next = pos + slen pos += S_BLK_HDR.size - # corrupt header - if pos_next < pos: - break + # corrupt header + if pos_next < pos: + break + + # followed by block-specific header + if stype == RAR_BLOCK_OLD_COMMENT and pos + S_COMMENT_HDR.size <= pos_next: + declen, ver, meth, crc = S_COMMENT_HDR.unpack_from(hdata, pos) + pos += S_COMMENT_HDR.size + data = hdata[pos : pos_next] + cmt = rar3_decompress(ver, meth, data, declen, sflags, + crc, self._password) + if not self._crc_check: + h.comment = self._decode_comment(cmt) + elif rar_crc32(cmt) & 0xFFFF == crc: + h.comment = self._decode_comment(cmt) + + pos = pos_next + return pos + + def _read_comment_v3(self, inf, psw=None): + + # read data + with XFile(inf.volume_file) as rf: + rf.seek(inf.data_offset) + data = rf.read(inf.compress_size) + + # decompress + cmt = 
rar3_decompress(inf.extract_version, inf.compress_type, data, + inf.file_size, inf.flags, inf.CRC, psw, inf.salt) + + # check crc + if self._crc_check: + crc = rar_crc32(cmt) + if crc != inf.CRC: + return None + + return self._decode_comment(cmt) + + def _decode(self, val): + for c in TRY_ENCODINGS: + try: + return val.decode(c) + except UnicodeError: + pass + return val.decode(self._charset, 'replace') + + def _decode_comment(self, val): + return self._decode(val) + + def process_entry(self, fd, item): + if item.type == RAR_BLOCK_FILE: + # use only first part + if (item.flags & RAR_FILE_SPLIT_BEFORE) == 0: + self._info_map[item.filename] = item + self._info_list.append(item) + elif len(self._info_list) > 0: + # final crc is in last block + old = self._info_list[-1] + old.CRC = item.CRC + old._md_expect = item._md_expect + old.compress_size += item.compress_size + + # parse new-style comment + if item.type == RAR_BLOCK_SUB and item.filename == 'CMT': + if item.flags & (RAR_FILE_SPLIT_BEFORE | RAR_FILE_SPLIT_AFTER): + pass + elif item.flags & RAR_FILE_SOLID: + # file comment + cmt = self._read_comment_v3(item, self._password) + if len(self._info_list) > 0: + old = self._info_list[-1] + old.comment = cmt + else: + # archive comment + cmt = self._read_comment_v3(item, self._password) + self.comment = cmt - # followed by block-specific header - if stype == RAR_BLOCK_OLD_COMMENT and pos + S_COMMENT_HDR.size <= pos_next: - declen, ver, meth, crc = S_COMMENT_HDR.unpack_from(hdata, pos) - pos += S_COMMENT_HDR.size - data = hdata[pos : pos_next] - cmt = rar_decompress(ver, meth, data, declen, sflags, - crc, self._password) - if not self._crc_check: - h.comment = self._decode_comment(cmt) - elif crc32(cmt) & 0xFFFF == crc: - h.comment = self._decode_comment(cmt) + if item.type == RAR_BLOCK_MAIN: + if item.flags & RAR_MAIN_COMMENT: + self.comment = item.comment + if item.flags & RAR_MAIN_PASSWORD: + self._needs_password = True - pos = pos_next + # put file compressed data into temporary .rar archive, and run + # unrar on that, thus avoiding unrar going over whole archive + def _open_hack(self, inf, psw): + # create main header: crc, type, flags, size, res1, res2 + prefix = RAR_ID + S_BLK_HDR.pack(0x90CF, 0x73, 0, 13) + ZERO * (2 + 4) + return self._open_hack_core(inf, psw, prefix, EMPTY) - def _parse_ext_time(self, h, pos): - data = h.header_data +# +# RAR5 format +# - # flags and rest of data can be missing - flags = 0 - if pos + 2 <= len(data): - flags = S_SHORT.unpack_from(data, pos)[0] - pos += 2 +class Rar5Info(RarInfo): + """Shared fields for RAR5 records. 
+ """ + extract_version = 50 + header_crc = None + header_size = None + header_offset = None + data_offset = None + + # type=all + block_type = None + block_flags = None + add_size = 0 + block_extra_size = 0 + + # type=MAIN + volume_number = None + _md_class = None + _md_expect = None + + def _must_disable_hack(self): + return False - h.mtime, pos = self._parse_xtime(flags >> 3*4, data, pos, h.date_time) - h.ctime, pos = self._parse_xtime(flags >> 2*4, data, pos) - h.atime, pos = self._parse_xtime(flags >> 1*4, data, pos) - h.arctime, pos = self._parse_xtime(flags >> 0*4, data, pos) - return pos - def _parse_xtime(self, flag, data, pos, dostime = None): - unit = 10000000.0 # 100 ns units - if flag & 8: - if not dostime: - t = S_LONG.unpack_from(data, pos)[0] - dostime = parse_dos_time(t) - pos += 4 - rem = 0 - cnt = flag & 3 - for i in range(cnt): - b = S_BYTE.unpack_from(data, pos)[0] - rem = (b << 16) | (rem >> 8) - pos += 1 - sec = dostime[5] + rem / unit - if flag & 4: - sec += 1 - dostime = dostime[:5] + (sec,) - return dostime, pos +class Rar5BaseFile(Rar5Info): + """Shared sturct for file & service record. + """ + type = -1 + file_flags = None + file_encryption = (0, 0, 0, EMPTY, EMPTY, EMPTY) + file_compress_flags = None + file_redir = None + file_owner = None + file_version = None + blake2sp_hash = None + + def _must_disable_hack(self): + if self.flags & RAR_FILE_PASSWORD: + return True + if self.block_flags & (RAR5_BLOCK_FLAG_SPLIT_BEFORE | RAR5_BLOCK_FLAG_SPLIT_AFTER): + return True + if self.file_compress_flags & RAR5_COMPR_SOLID: + return True + if self.file_redir: + return True + return False - # given current vol name, construct next one - def _next_volname(self, volfile): - if is_filelike(volfile): - raise IOError("Working on single FD") - if self._main.flags & RAR_MAIN_NEWNUMBERING: - return self._next_newvol(volfile) - return self._next_oldvol(volfile) - - # new-style next volume - def _next_newvol(self, volfile): - i = len(volfile) - 1 - while i >= 0: - if volfile[i] >= '0' and volfile[i] <= '9': - return self._inc_volname(volfile, i) - i -= 1 - raise BadRarName("Cannot construct volume name: "+volfile) - - # old-style next volume - def _next_oldvol(self, volfile): - # rar -> r00 - if volfile[-4:].lower() == '.rar': - return volfile[:-2] + '00' - return self._inc_volname(volfile, len(volfile) - 1) - - # increase digits with carry, otherwise just increment char - def _inc_volname(self, volfile, i): - fn = list(volfile) - while i >= 0: - if fn[i] != '9': - fn[i] = chr(ord(fn[i]) + 1) - break - fn[i] = '0' - i -= 1 - return ''.join(fn) - def _open_clear(self, inf): - return DirectReader(self, inf) +class Rar5FileInfo(Rar5BaseFile): + """RAR5 file record. + """ + type = RAR_BLOCK_FILE - # put file compressed data into temporary .rar archive, and run - # unrar on that, thus avoiding unrar going over whole archive - def _open_hack(self, inf, psw = None): - BSIZE = 32*1024 - size = inf.compress_size + inf.header_size - rf = XFile(inf.volume_file, 0) - rf.seek(inf.header_offset) +class Rar5ServiceInfo(Rar5BaseFile): + """RAR5 service record. 
+ """ + type = RAR_BLOCK_SUB - tmpfd, tmpname = mkstemp(suffix='.rar') - tmpf = os.fdopen(tmpfd, "wb") - try: - # create main header: crc, type, flags, size, res1, res2 - mh = S_BLK_HDR.pack(0x90CF, 0x73, 0, 13) + ZERO * (2+4) - tmpf.write(RAR_ID + mh) - while size > 0: - if size > BSIZE: - buf = rf.read(BSIZE) - else: - buf = rf.read(size) - if not buf: - raise BadRarFile('read failed: ' + inf.filename) - tmpf.write(buf) - size -= len(buf) - tmpf.close() - rf.close() - except: - rf.close() - tmpf.close() - os.unlink(tmpname) - raise +class Rar5MainInfo(Rar5Info): + """RAR5 archive main record. + """ + type = RAR_BLOCK_MAIN + main_flags = None + main_volume_number = None - return self._open_unrar(tmpname, inf, psw, tmpname) + def _must_disable_hack(self): + if self.main_flags & RAR5_MAIN_FLAG_SOLID: + return True + return False - def _read_comment_v3(self, inf, psw=None): - # read data - rf = XFile(inf.volume_file) - rf.seek(inf.file_offset) - data = rf.read(inf.compress_size) - rf.close() +class Rar5EncryptionInfo(Rar5Info): + """RAR5 archive header encryption record. + """ + type = RAR5_BLOCK_ENCRYPTION + encryption_algo = None + encryption_flags = None + encryption_kdf_count = None + encryption_salt = None + encryption_check_value = None - # decompress - cmt = rar_decompress(inf.extract_version, inf.compress_type, data, - inf.file_size, inf.flags, inf.CRC, psw, inf.salt) + def needs_password(self): + return True - # check crc - if self._crc_check: - crc = crc32(cmt) - if crc < 0: - crc += (1 << 32) - if crc != inf.CRC: - return None - return self._decode_comment(cmt) +class Rar5EndArcInfo(Rar5Info): + """RAR5 end of archive record. + """ + type = RAR_BLOCK_ENDARC + endarc_flags = None - # write in-memory archive to temp file - needed for solid archives - def _open_unrar_membuf(self, memfile, inf, psw): - tmpname = membuf_tempfile(memfile) - return self._open_unrar(tmpname, inf, psw, tmpname) - # extract using unrar - def _open_unrar(self, rarfile, inf, psw = None, tmpfile = None): - if is_filelike(rarfile): - raise ValueError("Cannot use unrar directly on memory buffer") - cmd = [UNRAR_TOOL] + list(OPEN_ARGS) - add_password_arg(cmd, psw) - cmd.append("--") - cmd.append(rarfile) +class RAR5Parser(CommonParser): + """Parse RAR5 format. 
+ """ + _expect_sig = RAR5_ID + _hdrenc_main = None - # not giving filename avoids encoding related problems - if not tmpfile: - fn = inf.filename - if PATH_SEP != os.sep: - fn = fn.replace(PATH_SEP, os.sep) - cmd.append(fn) + # AES encrypted headers + _last_aes256_key = (-1, None, None) # (kdf_count, salt, key) + + def _gen_key(self, kdf_count, salt): + if self._last_aes256_key[:2] == (kdf_count, salt): + return self._last_aes256_key[2] + if kdf_count > 24: + raise BadRarFile('Too large kdf_count') + psw = self._password + if isinstance(psw, unicode): + psw = psw.encode('utf8') + key = pbkdf2_sha256(psw, salt, 1 << kdf_count) + self._last_aes256_key = (kdf_count, salt, key) + return key - # read from unrar pipe - return PipeReader(self, inf, cmd, tmpfile) + def _decrypt_header(self, fd): + if not _have_crypto: + raise NoCrypto('Cannot parse encrypted headers - no crypto') + h = self._hdrenc_main + key = self._gen_key(h.encryption_kdf_count, h.encryption_salt) + iv = fd.read(16) + return HeaderDecrypt(fd, key, iv) - def _decode(self, val): - for c in TRY_ENCODINGS: - try: - return val.decode(c) - except UnicodeError: - pass - return val.decode(self._charset, 'replace') + # common header + def _parse_block_header(self, fd): + header_offset = fd.tell() - def _decode_comment(self, val): - if UNICODE_COMMENTS: - return self._decode(val) - return val + preload = 4 + 3 + start_bytes = fd.read(preload) + header_crc, pos = load_le32(start_bytes, 0) + hdrlen, pos = load_vint(start_bytes, pos) + if hdrlen > 2 * 1024 * 1024: + return None + header_size = pos + hdrlen - # call unrar to extract a file - def _extract(self, fnlist, path=None, psw=None): - cmd = [UNRAR_TOOL] + list(EXTRACT_ARGS) + # read full header, check for EOF + hdata = start_bytes + fd.read(header_size - len(start_bytes)) + if len(hdata) != header_size: + self._set_error('Unexpected EOF when reading header') + return None + data_offset = fd.tell() - # pasoword - psw = psw or self._password - add_password_arg(cmd, psw) - cmd.append('--') + calc_crc = rar_crc32(memoryview(hdata)[4:]) + if header_crc != calc_crc: + # header parsing failed. 
+ self._set_error('Header CRC error: exp=%x got=%x (xlen = %d)', + header_crc, calc_crc, len(hdata)) + return None - # rar file - if is_filelike(self.rarfile): - tmpname = membuf_tempfile(self.rarfile) - cmd.append(tmpname) + block_type, pos = load_vint(hdata, pos) + + if block_type == RAR5_BLOCK_MAIN: + h, pos = self._parse_block_common(Rar5MainInfo(), hdata) + h = self._parse_main_block(h, hdata, pos) + elif block_type == RAR5_BLOCK_FILE: + h, pos = self._parse_block_common(Rar5FileInfo(), hdata) + h = self._parse_file_block(h, hdata, pos) + elif block_type == RAR5_BLOCK_SERVICE: + h, pos = self._parse_block_common(Rar5ServiceInfo(), hdata) + h = self._parse_file_block(h, hdata, pos) + elif block_type == RAR5_BLOCK_ENCRYPTION: + h, pos = self._parse_block_common(Rar5EncryptionInfo(), hdata) + h = self._parse_encryption_block(h, hdata, pos) + elif block_type == RAR5_BLOCK_ENDARC: + h, pos = self._parse_block_common(Rar5EndArcInfo(), hdata) + h = self._parse_endarc_block(h, hdata, pos) + else: + h = None + if h: + h.header_offset = header_offset + h.data_offset = data_offset + return h + + def _parse_block_common(self, h, hdata): + h.header_crc, pos = load_le32(hdata, 0) + hdrlen, pos = load_vint(hdata, pos) + h.header_size = hdrlen + pos + h.block_type, pos = load_vint(hdata, pos) + h.block_flags, pos = load_vint(hdata, pos) + + if h.block_flags & RAR5_BLOCK_FLAG_EXTRA_DATA: + h.block_extra_size, pos = load_vint(hdata, pos) + if h.block_flags & RAR5_BLOCK_FLAG_DATA_AREA: + h.add_size, pos = load_vint(hdata, pos) + + h.compress_size = h.add_size + + if h.block_flags & RAR5_BLOCK_FLAG_SKIP_IF_UNKNOWN: + h.flags |= RAR_SKIP_IF_UNKNOWN + if h.block_flags & RAR5_BLOCK_FLAG_DATA_AREA: + h.flags |= RAR_LONG_BLOCK + return h, pos + + def _parse_main_block(self, h, hdata, pos): + h.main_flags, pos = load_vint(hdata, pos) + if h.main_flags & RAR5_MAIN_FLAG_HAS_VOLNR: + h.main_volume_number = load_vint(hdata, pos) + + h.flags |= RAR_MAIN_NEWNUMBERING + if h.main_flags & RAR5_MAIN_FLAG_SOLID: + h.flags |= RAR_MAIN_SOLID + if h.main_flags & RAR5_MAIN_FLAG_ISVOL: + h.flags |= RAR_MAIN_VOLUME + if h.main_flags & RAR5_MAIN_FLAG_RECOVERY: + h.flags |= RAR_MAIN_RECOVERY + if self._hdrenc_main: + h.flags |= RAR_MAIN_PASSWORD + if h.main_flags & RAR5_MAIN_FLAG_HAS_VOLNR == 0: + h.flags |= RAR_MAIN_FIRSTVOLUME + + return h + + def _parse_file_block(self, h, hdata, pos): + h.file_flags, pos = load_vint(hdata, pos) + h.file_size, pos = load_vint(hdata, pos) + h.mode, pos = load_vint(hdata, pos) + + if h.file_flags & RAR5_FILE_FLAG_HAS_MTIME: + h.mtime, pos = load_unixtime(hdata, pos) + h.date_time = h.mtime.timetuple()[:6] + if h.file_flags & RAR5_FILE_FLAG_HAS_CRC32: + h.CRC, pos = load_le32(hdata, pos) + h._md_class = CRC32Context + h._md_expect = h.CRC + + h.file_compress_flags, pos = load_vint(hdata, pos) + h.file_host_os, pos = load_vint(hdata, pos) + h.orig_filename, pos = load_vstr(hdata, pos) + h.filename = h.orig_filename.decode('utf8', 'replace') + + # use compatible values + if h.file_host_os == RAR5_OS_WINDOWS: + h.host_os = RAR_OS_WIN32 + else: + h.host_os = RAR_OS_UNIX + h.compress_type = RAR_M0 + ((h.file_compress_flags >> 7) & 7) + + if h.block_extra_size: + # allow 1 byte of garbage + while pos < len(hdata) - 1: + xsize, pos = load_vint(hdata, pos) + xdata, pos = load_bytes(hdata, xsize, pos) + self._process_file_extra(h, xdata) + + if h.block_flags & RAR5_BLOCK_FLAG_SPLIT_BEFORE: + h.flags |= RAR_FILE_SPLIT_BEFORE + if h.block_flags & RAR5_BLOCK_FLAG_SPLIT_AFTER: + h.flags |= 
RAR_FILE_SPLIT_AFTER + if h.file_flags & RAR5_FILE_FLAG_ISDIR: + h.flags |= RAR_FILE_DIRECTORY + if h.file_compress_flags & RAR5_COMPR_SOLID: + h.flags |= RAR_FILE_SOLID + + return h + + def _parse_endarc_block(self, h, hdata, pos): + h.endarc_flags, pos = load_vint(hdata, pos) + if h.endarc_flags & RAR5_ENDARC_FLAG_NEXT_VOL: + h.flags |= RAR_ENDARC_NEXT_VOLUME + return h + + def _parse_encryption_block(self, h, hdata, pos): + h.encryption_algo, pos = load_vint(hdata, pos) + h.encryption_flags, pos = load_vint(hdata, pos) + h.encryption_kdf_count, pos = load_byte(hdata, pos) + h.encryption_salt, pos = load_bytes(hdata, 16, pos) + if h.encryption_flags & RAR5_ENC_FLAG_HAS_CHECKVAL: + h.encryption_check_value = load_bytes(hdata, 12, pos) + if h.encryption_algo != RAR5_XENC_CIPHER_AES256: + raise BadRarFile('Unsupported header encryption cipher') + self._hdrenc_main = h + return h + + # file extra record + def _process_file_extra(self, h, xdata): + xtype, pos = load_vint(xdata, 0) + if xtype == RAR5_XFILE_TIME: + self._parse_file_xtime(h, xdata, pos) + elif xtype == RAR5_XFILE_ENCRYPTION: + self._parse_file_encryption(h, xdata, pos) + elif xtype == RAR5_XFILE_HASH: + self._parse_file_hash(h, xdata, pos) + elif xtype == RAR5_XFILE_VERSION: + self._parse_file_version(h, xdata, pos) + elif xtype == RAR5_XFILE_REDIR: + self._parse_file_redir(h, xdata, pos) + elif xtype == RAR5_XFILE_OWNER: + self._parse_file_owner(h, xdata, pos) + elif xtype == RAR5_XFILE_SERVICE: + pass else: - tmpname = None - cmd.append(self.rarfile) + pass - # file list - for fn in fnlist: - if os.sep != PATH_SEP: - fn = fn.replace(PATH_SEP, os.sep) - cmd.append(fn) + # extra block for file time record + def _parse_file_xtime(self, h, xdata, pos): + tflags, pos = load_vint(xdata, pos) + ldr = load_windowstime + if tflags & RAR5_XTIME_UNIXTIME: + ldr = load_unixtime + if tflags & RAR5_XTIME_HAS_MTIME: + h.mtime, pos = ldr(xdata, pos) + h.date_time = h.mtime.timetuple()[:6] + if tflags & RAR5_XTIME_HAS_CTIME: + h.ctime, pos = ldr(xdata, pos) + if tflags & RAR5_XTIME_HAS_ATIME: + h.atime, pos = ldr(xdata, pos) + + # just remember encryption info + def _parse_file_encryption(self, h, xdata, pos): + algo, pos = load_vint(xdata, pos) + flags, pos = load_vint(xdata, pos) + kdf_count, pos = load_byte(xdata, pos) + salt, pos = load_bytes(xdata, 16, pos) + iv, pos = load_bytes(xdata, 16, pos) + checkval = None + if flags & RAR5_XENC_CHECKVAL: + checkval, pos = load_bytes(xdata, 12, pos) + if flags & RAR5_XENC_TWEAKED: + h._md_expect = None + h._md_class = NoHashContext + + h.file_encryption = (algo, flags, kdf_count, salt, iv, checkval) + h.flags |= RAR_FILE_PASSWORD + + def _parse_file_hash(self, h, xdata, pos): + hash_type, pos = load_vint(xdata, pos) + if hash_type == RAR5_XHASH_BLAKE2SP: + h.blake2sp_hash, pos = load_bytes(xdata, 32, pos) + if _have_blake2 and (h.file_encryption[1] & RAR5_XENC_TWEAKED) == 0: + h._md_class = Blake2SP + h._md_expect = h.blake2sp_hash + + def _parse_file_version(self, h, xdata, pos): + flags, pos = load_vint(xdata, pos) + version, pos = load_vint(xdata, pos) + h.file_version = (flags, version) + + def _parse_file_redir(self, h, xdata, pos): + redir_type, pos = load_vint(xdata, pos) + redir_flags, pos = load_vint(xdata, pos) + redir_name, pos = load_vstr(xdata, pos) + redir_name = redir_name.decode('utf8', 'replace') + h.file_redir = (redir_type, redir_flags, redir_name) + + def _parse_file_owner(self, h, xdata, pos): + user_name = group_name = user_id = group_id = None + + flags, pos = 
load_vint(xdata, pos) + if flags & RAR5_XOWNER_UNAME: + user_name, pos = load_vstr(xdata, pos) + if flags & RAR5_XOWNER_GNAME: + group_name, pos = load_vstr(xdata, pos) + if flags & RAR5_XOWNER_UID: + user_id, pos = load_vint(xdata, pos) + if flags & RAR5_XOWNER_GID: + group_id, pos = load_vint(xdata, pos) + + h.file_owner = (user_name, group_name, user_id, group_id) + + def process_entry(self, fd, item): + if item.block_type == RAR5_BLOCK_FILE: + # use only first part + if (item.block_flags & RAR5_BLOCK_FLAG_SPLIT_BEFORE) == 0: + self._info_map[item.filename] = item + self._info_list.append(item) + elif len(self._info_list) > 0: + # final crc is in last block + old = self._info_list[-1] + old.CRC = item.CRC + old._md_expect = item._md_expect + old.blake2sp_hash = item.blake2sp_hash + old.compress_size += item.compress_size + elif item.block_type == RAR5_BLOCK_SERVICE: + if item.filename == 'CMT': + self._load_comment(fd, item) - # destination path - if path is not None: - cmd.append(path + os.sep) + def _load_comment(self, fd, item): + if item.block_flags & (RAR5_BLOCK_FLAG_SPLIT_BEFORE | RAR5_BLOCK_FLAG_SPLIT_AFTER): + return None + if item.compress_type != RAR_M0: + return None - # call - try: - p = custom_popen(cmd) - output = p.communicate()[0] - check_returncode(p, output) - finally: - if tmpname: - os.unlink(tmpname) + if item.flags & RAR_FILE_PASSWORD: + algo, ___flags, kdf_count, salt, iv, ___checkval = item.file_encryption + if algo != RAR5_XENC_CIPHER_AES256: + return None + key = self._gen_key(kdf_count, salt) + f = HeaderDecrypt(fd, key, iv) + cmt = f.read(item.file_size) + else: + # archive comment + with self._open_clear(item) as cmtstream: + cmt = cmtstream.read() + + # rar bug? - appends zero to comment + cmt = cmt.split(ZERO, 1)[0] + self.comment = cmt.decode('utf8') + + def _open_hack(self, inf, psw): + # len, type, blk_flags, flags + main_hdr = b'\x03\x01\x00\x00' + endarc_hdr = b'\x03\x05\x00\x00' + main_hdr = S_LONG.pack(rar_crc32(main_hdr)) + main_hdr + endarc_hdr = S_LONG.pack(rar_crc32(endarc_hdr)) + endarc_hdr + return self._open_hack_core(inf, psw, RAR5_ID + main_hdr, endarc_hdr) ## ## Utility classes ## class UnicodeFilename(object): - """Handle unicode filename decompression""" - + """Handle RAR3 unicode filename decompression. + """ def __init__(self, name, encdata): self.std_name = bytearray(name) self.encdata = bytearray(encdata) @@ -1255,6 +1898,7 @@ def __init__(self, name, encdata): self.failed = 0 def enc_byte(self): + """Copy encoded byte.""" try: c = self.encdata[self.encpos] self.encpos += 1 @@ -1264,6 +1908,7 @@ def enc_byte(self): return 0 def std_byte(self): + """Copy byte from 8-bit representation.""" try: return self.std_name[self.pos] except IndexError: @@ -1271,11 +1916,13 @@ def std_byte(self): return ord('?') def put(self, lo, hi): + """Copy 16-bit value to result.""" self.buf.append(lo) self.buf.append(hi) self.pos += 1 def decode(self): + """Decompress compressed UTF16 value.""" hi = self.enc_byte() flagbits = 0 while self.encpos < len(self.encdata): @@ -1294,11 +1941,11 @@ def decode(self): n = self.enc_byte() if n & 0x80: c = self.enc_byte() - for i in range((n & 0x7f) + 2): + for _ in range((n & 0x7f) + 2): lo = (self.std_byte() + c) & 0xFF self.put(lo, hi) else: - for i in range(n + 2): + for _ in range(n + 2): self.put(self.std_byte(), 0) return self.buf.decode("utf-16le", "replace") @@ -1311,77 +1958,78 @@ class RarExtFile(RawIOBase): Behaviour: - no short reads - .read() and .readinfo() read as much as requested. 
- no internal buffer, use io.BufferedReader for that. - - If :mod:`io` module is available (Python 2.6+, 3.x), then this calls - will inherit from :class:`io.RawIOBase` class. This makes line-based - access available: :meth:`RarExtFile.readline` and ``for ln in f``. """ #: Filename of the archive entry name = None - def __init__(self, rf, inf): + def __init__(self, parser, inf): + """Open archive entry. + """ super(RarExtFile, self).__init__() # standard io.* properties self.name = inf.filename self.mode = 'rb' - self.rf = rf - self.inf = inf - self.crc_check = rf._crc_check - self.fd = None - self.CRC = 0 - self.remain = 0 - self.returncode = 0 + self._parser = parser + self._inf = inf + self._fd = None + self._remain = 0 + self._returncode = 0 + + self._md_context = None self._open() def _open(self): - if self.fd: - self.fd.close() - self.fd = None - self.CRC = 0 - self.remain = self.inf.file_size + if self._fd: + self._fd.close() + md_class = self._inf._md_class or NoHashContext + self._md_context = md_class() + self._fd = None + self._remain = self._inf.file_size - def read(self, cnt = None): + def read(self, cnt=None): """Read all or specified amount of data from archive entry.""" # sanitize cnt if cnt is None or cnt < 0: - cnt = self.remain - elif cnt > self.remain: - cnt = self.remain + cnt = self._remain + elif cnt > self._remain: + cnt = self._remain if cnt == 0: return EMPTY # actual read data = self._read(cnt) if data: - self.CRC = crc32(data, self.CRC) - self.remain -= len(data) + self._md_context.update(data) + self._remain -= len(data) if len(data) != cnt: raise BadRarFile("Failed the read enough data") # done? - if not data or self.remain == 0: - #self.close() + if not data or self._remain == 0: + # self.close() self._check() return data def _check(self): """Check final CRC.""" - if not self.crc_check: + final = self._md_context.digest() + exp = self._inf._md_expect + if exp is None: return - if self.returncode: + if final is None: + return + if self._returncode: check_returncode(self, '') - if self.remain != 0: + if self._remain != 0: raise BadRarFile("Failed the read enough data") - crc = self.CRC - if crc < 0: - crc += (1 << 32) - if crc != self.inf.CRC: - raise BadRarFile("Corrupt file - CRC check failed: " + self.inf.filename) + if final != exp: + raise BadRarFile("Corrupt file - CRC check failed: %s - exp=%r got=%r" % ( + self._inf.filename, exp, final)) def _read(self, cnt): """Actual read that gets sanitized cnt.""" @@ -1391,9 +2039,9 @@ def close(self): super(RarExtFile, self).close() - if self.fd: - self.fd.close() - self.fd = None + if self._fd: + self._fd.close() + self._fd = None def __del__(self): """Hook delete to make sure tempfile is removed.""" @@ -1404,25 +2052,15 @@ def readinto(self, buf): Returns bytes read. """ - - data = self.read(len(buf)) - n = len(data) - try: - buf[:n] = data - except TypeError: - import array - if not isinstance(buf, array.array): - raise - buf[:n] = array.array(buf.typecode, data) - return n + raise NotImplementedError('readinto') def tell(self): """Return current reading position in uncompressed data.""" - return self.inf.file_size - self.remain + return self._inf.file_size - self._remain - def seek(self, ofs, whence = 0): + def seek(self, ofs, whence=0): """Seek in data. - + On uncompressed files, the seeking works by actual seeks so it's fast. 
On compresses files its slow - forward seeking happends by reading ahead, @@ -1430,9 +2068,9 @@ def seek(self, ofs, whence = 0): """ # disable crc check when seeking - self.crc_check = 0 + self._md_context = NoHashContext() - fsize = self.inf.file_size + fsize = self._inf.file_size cur_ofs = self.tell() if whence == 0: # seek from beginning of file @@ -1454,8 +2092,6 @@ def seek(self, ofs, whence = 0): if new_ofs >= cur_ofs: self._skip(new_ofs - cur_ofs) else: - # process old data ? - #self._skip(fsize - cur_ofs) # reopen and seek self._open() self._skip(new_ofs) @@ -1478,13 +2114,14 @@ def readable(self): def writable(self): """Returns False. - - Writing is not supported.""" + + Writing is not supported. + """ return False def seekable(self): """Returns True. - + Seeking is supported, although it's slow on compressed files. """ return True @@ -1499,23 +2136,23 @@ class PipeReader(RarExtFile): """Read data from pipe, handle tempfile cleanup.""" def __init__(self, rf, inf, cmd, tempfile=None): - self.cmd = cmd - self.proc = None - self.tempfile = tempfile + self._cmd = cmd + self._proc = None + self._tempfile = tempfile super(PipeReader, self).__init__(rf, inf) def _close_proc(self): - if not self.proc: + if not self._proc: return - if self.proc.stdout: - self.proc.stdout.close() - if self.proc.stdin: - self.proc.stdin.close() - if self.proc.stderr: - self.proc.stderr.close() - self.proc.wait() - self.returncode = self.proc.returncode - self.proc = None + if self._proc.stdout: + self._proc.stdout.close() + if self._proc.stdin: + self._proc.stdin.close() + if self._proc.stderr: + self._proc.stderr.close() + self._proc.wait() + self._returncode = self._proc.returncode + self._proc = None def _open(self): super(PipeReader, self)._open() @@ -1524,19 +2161,19 @@ def _open(self): self._close_proc() # launch new process - self.returncode = 0 - self.proc = custom_popen(self.cmd) - self.fd = self.proc.stdout + self._returncode = 0 + self._proc = custom_popen(self._cmd) + self._fd = self._proc.stdout # avoid situation where unrar waits on stdin - if self.proc.stdin: - self.proc.stdin.close() + if self._proc.stdin: + self._proc.stdin.close() def _read(self, cnt): """Read from pipe.""" # normal read is usually enough - data = self.fd.read(cnt) + data = self._fd.read(cnt) if len(data) == cnt or not data: return data @@ -1544,7 +2181,7 @@ def _read(self, cnt): buf = [data] cnt -= len(data) while cnt > 0: - data = self.fd.read(cnt) + data = self._fd.read(cnt) if not data: break cnt -= len(data) @@ -1557,42 +2194,45 @@ def close(self): self._close_proc() super(PipeReader, self).close() - if self.tempfile: + if self._tempfile: try: - os.unlink(self.tempfile) + os.unlink(self._tempfile) except OSError: pass - self.tempfile = None + self._tempfile = None def readinto(self, buf): """Zero-copy read directly into buffer.""" cnt = len(buf) - if cnt > self.remain: - cnt = self.remain + if cnt > self._remain: + cnt = self._remain vbuf = memoryview(buf) res = got = 0 while got < cnt: - res = self.fd.readinto(vbuf[got : cnt]) + res = self._fd.readinto(vbuf[got : cnt]) if not res: break - if self.crc_check: - self.CRC = crc32(vbuf[got : got + res], self.CRC) - self.remain -= res + self._md_context.update(vbuf[got : got + res]) + self._remain -= res got += res return got class DirectReader(RarExtFile): - """Read uncompressed data directly from archive.""" + """Read uncompressed data directly from archive. 
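+
+    Used for entries stored without compression (RAR_M0) and without
+    encryption, where the data can be read as-is from the volume files.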
+ """ + _cur = None + _cur_avail = None + _volfile = None def _open(self): super(DirectReader, self)._open() - self.volfile = self.inf.volume_file - self.fd = XFile(self.volfile, 0) - self.fd.seek(self.inf.header_offset, 0) - self.cur = self.rf._parse_header(self.fd) - self.cur_avail = self.cur.add_size + self._volfile = self._inf.volume_file + self._fd = XFile(self._volfile, 0) + self._fd.seek(self._inf.header_offset, 0) + self._cur = self._parser._parse_header(self._fd) + self._cur_avail = self._cur.add_size def _skip(self, cnt): """RAR Seek, skipping through rar files to get to correct position @@ -1600,19 +2240,19 @@ def _skip(self, cnt): while cnt > 0: # next vol needed? - if self.cur_avail == 0: + if self._cur_avail == 0: if not self._open_next(): break # fd is in read pos, do the read - if cnt > self.cur_avail: - cnt -= self.cur_avail - self.remain -= self.cur_avail - self.cur_avail = 0 + if cnt > self._cur_avail: + cnt -= self._cur_avail + self._remain -= self._cur_avail + self._cur_avail = 0 else: - self.fd.seek(cnt, 1) - self.cur_avail -= cnt - self.remain -= cnt + self._fd.seek(cnt, 1) + self._cur_avail -= cnt + self._remain -= cnt cnt = 0 def _read(self, cnt): @@ -1621,21 +2261,21 @@ def _read(self, cnt): buf = [] while cnt > 0: # next vol needed? - if self.cur_avail == 0: + if self._cur_avail == 0: if not self._open_next(): break # fd is in read pos, do the read - if cnt > self.cur_avail: - data = self.fd.read(self.cur_avail) + if cnt > self._cur_avail: + data = self._fd.read(self._cur_avail) else: - data = self.fd.read(cnt) + data = self._fd.read(cnt) if not data: break # got some data cnt -= len(data) - self.cur_avail -= len(data) + self._cur_avail -= len(data) buf.append(data) if len(buf) == 1: @@ -1646,31 +2286,34 @@ def _open_next(self): """Proceed to next volume.""" # is the file split over archives? - if (self.cur.flags & RAR_FILE_SPLIT_AFTER) == 0: + if (self._cur.flags & RAR_FILE_SPLIT_AFTER) == 0: return False - if self.fd: - self.fd.close() - self.fd = None + if self._fd: + self._fd.close() + self._fd = None # open next part - self.volfile = self.rf._next_volname(self.volfile) - fd = open(self.volfile, "rb", 0) - self.fd = fd + self._volfile = self._parser._next_volname(self._volfile) + fd = open(self._volfile, "rb", 0) + self._fd = fd + sig = fd.read(len(self._parser._expect_sig)) + if sig != self._parser._expect_sig: + raise BadRarFile("Invalid signature") # loop until first file header while 1: - cur = self.rf._parse_header(fd) + cur = self._parser._parse_header(fd) if not cur: raise BadRarFile("Unexpected EOF") if cur.type in (RAR_BLOCK_MARK, RAR_BLOCK_MAIN): if cur.add_size: fd.seek(cur.add_size, 1) continue - if cur.orig_filename != self.inf.orig_filename: + if cur.orig_filename != self._inf.orig_filename: raise BadRarFile("Did not found file entry") - self.cur = cur - self.cur_avail = cur.add_size + self._cur = cur + self._cur_avail = cur.add_size return True def readinto(self, buf): @@ -1679,23 +2322,22 @@ def readinto(self, buf): vbuf = memoryview(buf) while got < len(buf): # next vol needed? 
- if self.cur_avail == 0: + if self._cur_avail == 0: if not self._open_next(): break # length for next read cnt = len(buf) - got - if cnt > self.cur_avail: - cnt = self.cur_avail + if cnt > self._cur_avail: + cnt = self._cur_avail # read into temp view - res = self.fd.readinto(vbuf[got : got + cnt]) + res = self._fd.readinto(vbuf[got : got + cnt]) if not res: break - if self.crc_check: - self.CRC = crc32(vbuf[got : got + res], self.CRC) - self.cur_avail -= res - self.remain -= res + self._md_context.update(vbuf[got : got + res]) + self._cur_avail -= res + self._remain -= res got += res return got @@ -1708,10 +2350,12 @@ def __init__(self, f, key, iv): self.buf = EMPTY def tell(self): + """Current file pos - works only on block boundaries.""" return self.f.tell() def read(self, cnt=None): - if cnt > 8*1024: + """Read and decrypt.""" + if cnt > 8 * 1024: raise BadRarFile('Bad count to header decrypt - wrong password?') # consume old data @@ -1724,10 +2368,10 @@ def read(self, cnt=None): cnt -= len(res) # decrypt new data - BLK = self.ciph.block_size + blklen = 16 while cnt > 0: - enc = self.f.read(BLK) - if len(enc) < BLK: + enc = self.f.read(blklen) + if len(enc) < blklen: break dec = self.ciph.decrypt(enc) if cnt >= len(dec): @@ -1740,10 +2384,14 @@ def read(self, cnt=None): return res + # handle (filename|filelike) object class XFile(object): + """Input may be filename or file object. + """ __slots__ = ('_fd', '_need_close') - def __init__(self, xfile, bufsize = 1024): + + def __init__(self, xfile, bufsize=1024): if is_filelike(xfile): self._need_close = False self._fd = xfile @@ -1751,27 +2399,279 @@ def __init__(self, xfile, bufsize = 1024): else: self._need_close = True self._fd = open(xfile, 'rb', bufsize) + def read(self, n=None): + """Read from file.""" return self._fd.read(n) + def tell(self): + """Return file pos.""" return self._fd.tell() + def seek(self, ofs, whence=0): + """Move file pos.""" return self._fd.seek(ofs, whence) + def readinto(self, dst): + """Read into buffer.""" return self._fd.readinto(dst) + def close(self): + """Close file object.""" if self._need_close: self._fd.close() + def __enter__(self): return self + def __exit__(self, typ, val, tb): self.close() + +class NoHashContext(object): + """No-op hash function.""" + def __init__(self, data=None): + """Initialize""" + def update(self, data): + """Update data""" + def digest(self): + """Final hash""" + def hexdigest(self): + """Hexadecimal digest.""" + + +class CRC32Context(object): + """Hash context that uses CRC32.""" + __slots__ = ['_crc'] + + def __init__(self, data=None): + self._crc = 0 + if data: + self.update(data) + + def update(self, data): + """Process data.""" + self._crc = rar_crc32(data, self._crc) + + def digest(self): + """Final hash.""" + return self._crc + + def hexdigest(self): + """Hexadecimal digest.""" + return '%08x' % self.digest() + + +class Blake2SP(object): + """Blake2sp hash context. 
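+
+    Usage sketch (assumes a ``blake2s`` implementation is available)::
+
+        ctx = Blake2SP()
+        ctx.update(b'data')
+        digest = ctx.digest()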
+ """ + __slots__ = ['_thread', '_buf', '_cur', '_digest'] + digest_size = 32 + block_size = 64 + parallelism = 8 + + def __init__(self, data=None): + self._buf = b'' + self._cur = 0 + self._digest = None + self._thread = [] + + for i in range(self.parallelism): + ctx = self._blake2s(i, 0, i == (self.parallelism - 1)) + self._thread.append(ctx) + + if data: + self.update(data) + + def _blake2s(self, ofs, depth, is_last): + return blake2s(node_offset=ofs, node_depth=depth, last_node=is_last, + depth=2, inner_size=32, fanout=self.parallelism) + + def _add_block(self, blk): + self._thread[self._cur].update(blk) + self._cur = (self._cur + 1) % self.parallelism + + def update(self, data): + """Hash data. + """ + view = memoryview(data) + bs = self.block_size + if self._buf: + need = bs - len(self._buf) + if len(view) < need: + self._buf += view.tobytes() + return + self._add_block(self._buf + view[:need].tobytes()) + view = view[need:] + while len(view) >= bs: + self._add_block(view[:bs]) + view = view[bs:] + self._buf = view.tobytes() + + def digest(self): + """Return final digest value. + """ + if self._digest is None: + if self._buf: + self._add_block(self._buf) + self._buf = EMPTY + ctx = self._blake2s(0, 1, True) + for t in self._thread: + ctx.update(t.digest()) + self._digest = ctx.digest() + return self._digest + + def hexdigest(self): + """Hexadecimal digest.""" + return tohex(self.digest()) + ## ## Utility functions ## +S_LONG = Struct(' len(buf): + raise BadRarFile('cannot load byte') + return S_BYTE.unpack_from(buf, pos)[0], end + +def load_le32(buf, pos): + """Load little-endian 32-bit integer""" + end = pos + 4 + if end > len(buf): + raise BadRarFile('cannot load le32') + return S_LONG.unpack_from(buf, pos)[0], pos + 4 + +def load_bytes(buf, num, pos): + """Load sequence of bytes""" + end = pos + num + if end > len(buf): + raise BadRarFile('cannot load bytes') + return buf[pos : end], end + +def load_vstr(buf, pos): + """Load bytes prefixed by vint length""" + slen, pos = load_vint(buf, pos) + return load_bytes(buf, slen, pos) + +def load_dostime(buf, pos): + """Load LE32 dos timestamp""" + stamp, pos = load_le32(buf, pos) + tup = parse_dos_time(stamp) + return to_datetime(tup), pos + +def load_unixtime(buf, pos): + """Load LE32 unix timestamp""" + secs, pos = load_le32(buf, pos) + dt = datetime.fromtimestamp(secs, UTC) + return dt, pos + +def load_windowstime(buf, pos): + """Load LE64 windows timestamp""" + # unix epoch (1970) in seconds from windows epoch (1601) + unix_epoch = 11644473600 + val1, pos = load_le32(buf, pos) + val2, pos = load_le32(buf, pos) + secs, n1secs = divmod((val2 << 32) | val1, 10000000) + dt = datetime.fromtimestamp(secs - unix_epoch, UTC) + dt = dt.replace(microsecond=n1secs // 10) + return dt, pos + +# new-style next volume +def _next_newvol(volfile): + i = len(volfile) - 1 + while i >= 0: + if volfile[i] >= '0' and volfile[i] <= '9': + return _inc_volname(volfile, i) + i -= 1 + raise BadRarName("Cannot construct volume name: " + volfile) + +# old-style next volume +def _next_oldvol(volfile): + # rar -> r00 + if volfile[-4:].lower() == '.rar': + return volfile[:-2] + '00' + return _inc_volname(volfile, len(volfile) - 1) + +# increase digits with carry, otherwise just increment char +def _inc_volname(volfile, i): + fn = list(volfile) + while i >= 0: + if fn[i] != '9': + fn[i] = chr(ord(fn[i]) + 1) + break + fn[i] = '0' + i -= 1 + return ''.join(fn) + +# rar3 extended time fields +def _parse_ext_time(h, data, pos): + # flags and rest of data can be missing 
+ flags = 0 + if pos + 2 <= len(data): + flags = S_SHORT.unpack_from(data, pos)[0] + pos += 2 + + mtime, pos = _parse_xtime(flags >> 3 * 4, data, pos, h.mtime) + h.ctime, pos = _parse_xtime(flags >> 2 * 4, data, pos) + h.atime, pos = _parse_xtime(flags >> 1 * 4, data, pos) + h.arctime, pos = _parse_xtime(flags >> 0 * 4, data, pos) + if mtime: + h.mtime = mtime + h.date_time = mtime.timetuple()[:6] + return pos + +# rar3 one extended time field +def _parse_xtime(flag, data, pos, basetime=None): + res = None + if flag & 8: + if not basetime: + basetime, pos = load_dostime(data, pos) + + # load second fractions + rem = 0 + cnt = flag & 3 + for _ in range(cnt): + b, pos = load_byte(data, pos) + rem = (b << 16) | (rem >> 8) + + # convert 100ns units to microseconds + usec = rem // 10 + if usec > 1000000: + usec = 999999 + + # dostime has room for 30 seconds only, correct if needed + if flag & 4 and basetime.second < 59: + res = basetime.replace(microsecond=usec, second=basetime.second + 1) + else: + res = basetime.replace(microsecond=usec) + return res, pos + def is_filelike(obj): + """Filename or file object? + """ if isinstance(obj, str) or isinstance(obj, unicode): return False res = True @@ -1782,14 +2682,16 @@ def is_filelike(obj): return True def rar3_s2k(psw, salt): - """String-to-key hash for RAR3.""" - + """String-to-key hash for RAR3. + """ + if not isinstance(psw, unicode): + psw = psw.decode('utf8') seed = psw.encode('utf-16le') + salt iv = EMPTY h = sha1() for i in range(16): for j in range(0x4000): - cnt = S_LONG.pack(i*0x4000 + j) + cnt = S_LONG.pack(i * 0x4000 + j) h.update(seed + cnt[:3]) if j == 0: iv += h.digest()[19:20] @@ -1797,12 +2699,11 @@ def rar3_s2k(psw, salt): key_le = pack("LLLL", key_be)) return key_le, iv -def rar_decompress(vers, meth, data, declen=0, flags=0, crc=0, psw=None, salt=None): +def rar3_decompress(vers, meth, data, declen=0, flags=0, crc=0, psw=None, salt=None): """Decompress blob of compressed data. Used for data with non-standard header - eg. comments. """ - # already uncompressed? if meth == RAR_M0 and (flags & RAR_FILE_PASSWORD) == 0: return data @@ -1826,11 +2727,11 @@ def rar_decompress(vers, meth, data, declen=0, flags=0, crc=0, psw=None, salt=No # full header hlen = S_BLK_HDR.size + len(fhdr) hdr = S_BLK_HDR.pack(0, RAR_BLOCK_FILE, flags, hlen) + fhdr - hcrc = crc32(hdr[2:]) & 0xFFFF + hcrc = rar_crc32(hdr[2:]) & 0xFFFF hdr = S_BLK_HDR.pack(hcrc, RAR_BLOCK_FILE, flags, hlen) + fhdr # archive main header - mh = S_BLK_HDR.pack(0x90CF, RAR_BLOCK_MAIN, 0, 13) + ZERO * (2+4) + mh = S_BLK_HDR.pack(0x90CF, RAR_BLOCK_MAIN, 0, 13) + ZERO * (2 + 4) # decompress via temp rar tmpfd, tmpname = mkstemp(suffix='.rar') @@ -1850,62 +2751,66 @@ def rar_decompress(vers, meth, data, declen=0, flags=0, crc=0, psw=None, salt=No os.unlink(tmpname) def to_datetime(t): - """Convert 6-part time tuple into datetime object.""" - + """Convert 6-part time tuple into datetime object. 
+ """ if t is None: return None # extract values - year, mon, day, h, m, xs = t - s = int(xs) - us = int(1000000 * (xs - s)) + year, mon, day, h, m, s = t # assume the values are valid try: - return datetime(year, mon, day, h, m, s, us) + return datetime(year, mon, day, h, m, s) except ValueError: pass # sanitize invalid values - MDAY = (0, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) - if mon < 1: mon = 1 - if mon > 12: mon = 12 - if day < 1: day = 1 - if day > MDAY[mon]: day = MDAY[mon] - if h > 23: h = 23 - if m > 59: m = 59 - if s > 59: s = 59 + mday = (0, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) + if mon < 1: + mon = 1 + if mon > 12: + mon = 12 + if day < 1: + day = 1 + if day > mday[mon]: + day = mday[mon] + if h > 23: + h = 23 + if m > 59: + m = 59 + if s > 59: + s = 59 if mon == 2 and day == 29: try: - return datetime(year, mon, day, h, m, s, us) + return datetime(year, mon, day, h, m, s) except ValueError: day = 28 - return datetime(year, mon, day, h, m, s, us) + return datetime(year, mon, day, h, m, s) def parse_dos_time(stamp): - """Parse standard 32-bit DOS timestamp.""" - - sec = stamp & 0x1F; stamp = stamp >> 5 - min = stamp & 0x3F; stamp = stamp >> 6 - hr = stamp & 0x1F; stamp = stamp >> 5 - day = stamp & 0x1F; stamp = stamp >> 5 - mon = stamp & 0x0F; stamp = stamp >> 4 + """Parse standard 32-bit DOS timestamp. + """ + sec, stamp = stamp & 0x1F, stamp >> 5 + mn, stamp = stamp & 0x3F, stamp >> 6 + hr, stamp = stamp & 0x1F, stamp >> 5 + day, stamp = stamp & 0x1F, stamp >> 5 + mon, stamp = stamp & 0x0F, stamp >> 4 yr = (stamp & 0x7F) + 1980 - return (yr, mon, day, hr, min, sec * 2) + return (yr, mon, day, hr, mn, sec * 2) def custom_popen(cmd): - """Disconnect cmd from parent fds, read only from stdout.""" - + """Disconnect cmd from parent fds, read only from stdout. + """ # needed for py2exe creationflags = 0 if sys.platform == 'win32': - creationflags = 0x08000000 # CREATE_NO_WINDOW + creationflags = 0x08000000 # CREATE_NO_WINDOW # run command try: - p = Popen(cmd, bufsize = 0, - stdout = PIPE, stdin = PIPE, stderr = STDOUT, - creationflags = creationflags) + p = Popen(cmd, bufsize=0, stdout=PIPE, stdin=PIPE, stderr=STDOUT, + creationflags=creationflags) except OSError as ex: if ex.errno == errno.ENOENT: raise RarCannotExec("Unrar not installed? (rarfile.UNRAR_TOOL=%r)" % UNRAR_TOOL) @@ -1913,15 +2818,17 @@ def custom_popen(cmd): return p def custom_check(cmd, ignore_retcode=False): - """Run command, collect output, raise error if needed.""" + """Run command, collect output, raise error if needed. + """ p = custom_popen(cmd) - out, err = p.communicate() + out, _ = p.communicate() if p.returncode and not ignore_retcode: raise RarExecError("Check-run failed") return out -def add_password_arg(cmd, psw, required=False): - """Append password switch to commandline.""" +def add_password_arg(cmd, psw, ___required=False): + """Append password switch to commandline. + """ if UNRAR_TOOL == ALT_TOOL: return if psw is not None: @@ -1930,17 +2837,17 @@ def add_password_arg(cmd, psw, required=False): cmd.append('-p-') def check_returncode(p, out): - """Raise exception according to unrar exit code""" - + """Raise exception according to unrar exit code. 
+ """ code = p.returncode if code == 0: return - # map return code to exception class + # map return code to exception class, codes from rar.txt errmap = [None, - RarWarning, RarFatalError, RarCRCError, RarLockedArchiveError, - RarWriteError, RarOpenError, RarUserError, RarMemoryError, - RarCreateError, RarNoFilesError] # codes from rar.txt + RarWarning, RarFatalError, RarCRCError, RarLockedArchiveError, # 1..4 + RarWriteError, RarOpenError, RarUserError, RarMemoryError, # 5..8 + RarCreateError, RarNoFilesError, RarWrongPassword] # 9..11 if UNRAR_TOOL == ALT_TOOL: errmap = [None] if code > 0 and code < len(errmap): @@ -1960,43 +2867,85 @@ def check_returncode(p, out): raise exc(msg) +def hmac_sha256(key, data): + """HMAC-SHA256""" + return HMAC(key, data, sha256).digest() + def membuf_tempfile(memfile): + """Write in-memory file object to real file.""" memfile.seek(0, 0) tmpfd, tmpname = mkstemp(suffix='.rar') tmpf = os.fdopen(tmpfd, "wb") try: - BSIZE = 32*1024 while True: buf = memfile.read(BSIZE) if not buf: break tmpf.write(buf) tmpf.close() - return tmpname except: tmpf.close() os.unlink(tmpname) raise + return tmpname + +class XTempFile(object): + """Real file for archive. + """ + __slots__ = ('_tmpfile', '_filename') + + def __init__(self, rarfile): + if is_filelike(rarfile): + self._tmpfile = membuf_tempfile(rarfile) + self._filename = self._tmpfile + else: + self._tmpfile = None + self._filename = rarfile + + def __enter__(self): + return self._filename + + def __exit__(self, exc_type, exc_value, tb): + if self._tmpfile: + try: + os.unlink(self._tmpfile) + except OSError: + pass + self._tmpfile = None # # Check if unrar works # -try: - # does UNRAR_TOOL work? - custom_check([UNRAR_TOOL], True) -except RarCannotExec: +ORIG_UNRAR_TOOL = UNRAR_TOOL +ORIG_OPEN_ARGS = OPEN_ARGS +ORIG_EXTRACT_ARGS = EXTRACT_ARGS +ORIG_TEST_ARGS = TEST_ARGS + +def _check_unrar_tool(): + global UNRAR_TOOL, OPEN_ARGS, EXTRACT_ARGS, TEST_ARGS try: - # does ALT_TOOL work? - custom_check([ALT_TOOL] + list(ALT_CHECK_ARGS), True) - # replace config - UNRAR_TOOL = ALT_TOOL - OPEN_ARGS = ALT_OPEN_ARGS - EXTRACT_ARGS = ALT_EXTRACT_ARGS - TEST_ARGS = ALT_TEST_ARGS + # does UNRAR_TOOL work? + custom_check([ORIG_UNRAR_TOOL], True) + + UNRAR_TOOL = ORIG_UNRAR_TOOL + OPEN_ARGS = ORIG_OPEN_ARGS + EXTRACT_ARGS = ORIG_EXTRACT_ARGS + TEST_ARGS = ORIG_TEST_ARGS except RarCannotExec: - # no usable tool, only uncompressed archives work - pass + try: + # does ALT_TOOL work? + custom_check([ALT_TOOL] + list(ALT_CHECK_ARGS), True) + # replace config + UNRAR_TOOL = ALT_TOOL + OPEN_ARGS = ALT_OPEN_ARGS + EXTRACT_ARGS = ALT_EXTRACT_ARGS + TEST_ARGS = ALT_TEST_ARGS + except RarCannotExec: + # no usable tool, only uncompressed archives work + pass + +_check_unrar_tool() diff --git a/lib/scandir.py b/lib/scandir.py new file mode 100644 index 0000000000..15b9fe7d3c --- /dev/null +++ b/lib/scandir.py @@ -0,0 +1,671 @@ +"""scandir, a better directory iterator and faster os.walk(), now in the Python 3.5 stdlib + +scandir() is a generator version of os.listdir() that returns an +iterator over files in a directory, and also exposes the extra +information most OSes provide while iterating files in a directory +(such as type and stat information). + +This module also includes a version of os.walk() that uses scandir() +to speed it up significantly. 
+ +See README.md or https://github.com/benhoyt/scandir for rationale and +docs, or read PEP 471 (https://www.python.org/dev/peps/pep-0471/) for +more details on its inclusion into Python 3.5 + +scandir is released under the new BSD 3-clause license. See +LICENSE.txt for the full license text. +""" + +from __future__ import division + +from errno import ENOENT +from os import listdir, lstat, stat, strerror +from os.path import join, islink +from stat import S_IFDIR, S_IFLNK, S_IFREG +import collections +import os +import sys + +try: + import _scandir +except ImportError: + _scandir = None + +try: + import ctypes +except ImportError: + ctypes = None + +if _scandir is None and ctypes is None: + import warnings + warnings.warn("scandir can't find the compiled _scandir C module " + "or ctypes, using slow generic fallback") + +__version__ = '1.5' +__all__ = ['scandir', 'walk'] + +# Windows FILE_ATTRIBUTE constants for interpreting the +# FIND_DATA.dwFileAttributes member +FILE_ATTRIBUTE_ARCHIVE = 32 +FILE_ATTRIBUTE_COMPRESSED = 2048 +FILE_ATTRIBUTE_DEVICE = 64 +FILE_ATTRIBUTE_DIRECTORY = 16 +FILE_ATTRIBUTE_ENCRYPTED = 16384 +FILE_ATTRIBUTE_HIDDEN = 2 +FILE_ATTRIBUTE_INTEGRITY_STREAM = 32768 +FILE_ATTRIBUTE_NORMAL = 128 +FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 8192 +FILE_ATTRIBUTE_NO_SCRUB_DATA = 131072 +FILE_ATTRIBUTE_OFFLINE = 4096 +FILE_ATTRIBUTE_READONLY = 1 +FILE_ATTRIBUTE_REPARSE_POINT = 1024 +FILE_ATTRIBUTE_SPARSE_FILE = 512 +FILE_ATTRIBUTE_SYSTEM = 4 +FILE_ATTRIBUTE_TEMPORARY = 256 +FILE_ATTRIBUTE_VIRTUAL = 65536 + +IS_PY3 = sys.version_info >= (3, 0) + +if IS_PY3: + unicode = str # Because Python <= 3.2 doesn't have u'unicode' syntax + + +class GenericDirEntry(object): + __slots__ = ('name', '_stat', '_lstat', '_scandir_path', '_path') + + def __init__(self, scandir_path, name): + self._scandir_path = scandir_path + self.name = name + self._stat = None + self._lstat = None + self._path = None + + @property + def path(self): + if self._path is None: + self._path = join(self._scandir_path, self.name) + return self._path + + def stat(self, follow_symlinks=True): + if follow_symlinks: + if self._stat is None: + self._stat = stat(self.path) + return self._stat + else: + if self._lstat is None: + self._lstat = lstat(self.path) + return self._lstat + + def is_dir(self, follow_symlinks=True): + try: + st = self.stat(follow_symlinks=follow_symlinks) + except OSError as e: + if e.errno != ENOENT: + raise + return False # Path doesn't exist or is a broken symlink + return st.st_mode & 0o170000 == S_IFDIR + + def is_file(self, follow_symlinks=True): + try: + st = self.stat(follow_symlinks=follow_symlinks) + except OSError as e: + if e.errno != ENOENT: + raise + return False # Path doesn't exist or is a broken symlink + return st.st_mode & 0o170000 == S_IFREG + + def is_symlink(self): + try: + st = self.stat(follow_symlinks=False) + except OSError as e: + if e.errno != ENOENT: + raise + return False # Path doesn't exist or is a broken symlink + return st.st_mode & 0o170000 == S_IFLNK + + def inode(self): + st = self.stat(follow_symlinks=False) + return st.st_ino + + def __str__(self): + return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) + + __repr__ = __str__ + + +def _scandir_generic(path=unicode('.')): + """Like os.listdir(), but yield DirEntry objects instead of returning + a list of names. 
+ """ + for name in listdir(path): + yield GenericDirEntry(path, name) + + +if IS_PY3 and sys.platform == 'win32': + def scandir_generic(path=unicode('.')): + if isinstance(path, bytes): + raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead") + return _scandir_generic(path) + scandir_generic.__doc__ = _scandir_generic.__doc__ +else: + scandir_generic = _scandir_generic + + +scandir_c = None +scandir_python = None + + +if sys.platform == 'win32': + if ctypes is not None: + from ctypes import wintypes + + # Various constants from windows.h + INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value + ERROR_FILE_NOT_FOUND = 2 + ERROR_NO_MORE_FILES = 18 + IO_REPARSE_TAG_SYMLINK = 0xA000000C + + # Numer of seconds between 1601-01-01 and 1970-01-01 + SECONDS_BETWEEN_EPOCHS = 11644473600 + + kernel32 = ctypes.windll.kernel32 + + # ctypes wrappers for (wide string versions of) FindFirstFile, + # FindNextFile, and FindClose + FindFirstFile = kernel32.FindFirstFileW + FindFirstFile.argtypes = [ + wintypes.LPCWSTR, + ctypes.POINTER(wintypes.WIN32_FIND_DATAW), + ] + FindFirstFile.restype = wintypes.HANDLE + + FindNextFile = kernel32.FindNextFileW + FindNextFile.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(wintypes.WIN32_FIND_DATAW), + ] + FindNextFile.restype = wintypes.BOOL + + FindClose = kernel32.FindClose + FindClose.argtypes = [wintypes.HANDLE] + FindClose.restype = wintypes.BOOL + + Win32StatResult = collections.namedtuple('Win32StatResult', [ + 'st_mode', + 'st_ino', + 'st_dev', + 'st_nlink', + 'st_uid', + 'st_gid', + 'st_size', + 'st_atime', + 'st_mtime', + 'st_ctime', + 'st_atime_ns', + 'st_mtime_ns', + 'st_ctime_ns', + 'st_file_attributes', + ]) + + def filetime_to_time(filetime): + """Convert Win32 FILETIME to time since Unix epoch in seconds.""" + total = filetime.dwHighDateTime << 32 | filetime.dwLowDateTime + return total / 10000000 - SECONDS_BETWEEN_EPOCHS + + def find_data_to_stat(data): + """Convert Win32 FIND_DATA struct to stat_result.""" + # First convert Win32 dwFileAttributes to st_mode + attributes = data.dwFileAttributes + st_mode = 0 + if attributes & FILE_ATTRIBUTE_DIRECTORY: + st_mode |= S_IFDIR | 0o111 + else: + st_mode |= S_IFREG + if attributes & FILE_ATTRIBUTE_READONLY: + st_mode |= 0o444 + else: + st_mode |= 0o666 + if (attributes & FILE_ATTRIBUTE_REPARSE_POINT and + data.dwReserved0 == IO_REPARSE_TAG_SYMLINK): + st_mode ^= st_mode & 0o170000 + st_mode |= S_IFLNK + + st_size = data.nFileSizeHigh << 32 | data.nFileSizeLow + st_atime = filetime_to_time(data.ftLastAccessTime) + st_mtime = filetime_to_time(data.ftLastWriteTime) + st_ctime = filetime_to_time(data.ftCreationTime) + + # Some fields set to zero per CPython's posixmodule.c: st_ino, st_dev, + # st_nlink, st_uid, st_gid + return Win32StatResult(st_mode, 0, 0, 0, 0, 0, st_size, + st_atime, st_mtime, st_ctime, + int(st_atime * 1000000000), + int(st_mtime * 1000000000), + int(st_ctime * 1000000000), + attributes) + + class Win32DirEntryPython(object): + __slots__ = ('name', '_stat', '_lstat', '_find_data', '_scandir_path', '_path', '_inode') + + def __init__(self, scandir_path, name, find_data): + self._scandir_path = scandir_path + self.name = name + self._stat = None + self._lstat = None + self._find_data = find_data + self._path = None + self._inode = None + + @property + def path(self): + if self._path is None: + self._path = join(self._scandir_path, self.name) + return self._path + + def stat(self, follow_symlinks=True): + if follow_symlinks: + if self._stat is None: + if 
self.is_symlink(): + # It's a symlink, call link-following stat() + self._stat = stat(self.path) + else: + # Not a symlink, stat is same as lstat value + if self._lstat is None: + self._lstat = find_data_to_stat(self._find_data) + self._stat = self._lstat + return self._stat + else: + if self._lstat is None: + # Lazily convert to stat object, because it's slow + # in Python, and often we only need is_dir() etc + self._lstat = find_data_to_stat(self._find_data) + return self._lstat + + def is_dir(self, follow_symlinks=True): + is_symlink = self.is_symlink() + if follow_symlinks and is_symlink: + try: + return self.stat().st_mode & 0o170000 == S_IFDIR + except OSError as e: + if e.errno != ENOENT: + raise + return False + elif is_symlink: + return False + else: + return (self._find_data.dwFileAttributes & + FILE_ATTRIBUTE_DIRECTORY != 0) + + def is_file(self, follow_symlinks=True): + is_symlink = self.is_symlink() + if follow_symlinks and is_symlink: + try: + return self.stat().st_mode & 0o170000 == S_IFREG + except OSError as e: + if e.errno != ENOENT: + raise + return False + elif is_symlink: + return False + else: + return (self._find_data.dwFileAttributes & + FILE_ATTRIBUTE_DIRECTORY == 0) + + def is_symlink(self): + return (self._find_data.dwFileAttributes & + FILE_ATTRIBUTE_REPARSE_POINT != 0 and + self._find_data.dwReserved0 == IO_REPARSE_TAG_SYMLINK) + + def inode(self): + if self._inode is None: + self._inode = lstat(self.path).st_ino + return self._inode + + def __str__(self): + return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) + + __repr__ = __str__ + + def win_error(error, filename): + exc = WindowsError(error, ctypes.FormatError(error)) + exc.filename = filename + return exc + + def _scandir_python(path=unicode('.')): + """Like os.listdir(), but yield DirEntry objects instead of returning + a list of names. + """ + # Call FindFirstFile and handle errors + if isinstance(path, bytes): + is_bytes = True + filename = join(path.decode('mbcs', 'strict'), '*.*') + else: + is_bytes = False + filename = join(path, '*.*') + data = wintypes.WIN32_FIND_DATAW() + data_p = ctypes.byref(data) + handle = FindFirstFile(filename, data_p) + if handle == INVALID_HANDLE_VALUE: + error = ctypes.GetLastError() + if error == ERROR_FILE_NOT_FOUND: + # No files, don't yield anything + return + raise win_error(error, path) + + # Call FindNextFile in a loop, stopping when no more files + try: + while True: + # Skip '.' and '..' 
(current and parent directory), but + # otherwise yield (filename, stat_result) tuple + name = data.cFileName + if name not in ('.', '..'): + if is_bytes: + name = name.encode('mbcs', 'replace') + yield Win32DirEntryPython(path, name, data) + + data = wintypes.WIN32_FIND_DATAW() + data_p = ctypes.byref(data) + success = FindNextFile(handle, data_p) + if not success: + error = ctypes.GetLastError() + if error == ERROR_NO_MORE_FILES: + break + raise win_error(error, path) + finally: + if not FindClose(handle): + raise win_error(ctypes.GetLastError(), path) + + if IS_PY3: + def scandir_python(path=unicode('.')): + if isinstance(path, bytes): + raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead") + return _scandir_python(path) + scandir_python.__doc__ = _scandir_python.__doc__ + else: + scandir_python = _scandir_python + + if _scandir is not None: + scandir_c = _scandir.scandir + + if _scandir is not None: + scandir = scandir_c + elif ctypes is not None: + scandir = scandir_python + else: + scandir = scandir_generic + + +# Linux, OS X, and BSD implementation +elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform: + have_dirent_d_type = (sys.platform != 'sunos5') + + if ctypes is not None and have_dirent_d_type: + import ctypes.util + + DIR_p = ctypes.c_void_p + + # Rather annoying how the dirent struct is slightly different on each + # platform. The only fields we care about are d_name and d_type. + class Dirent(ctypes.Structure): + if sys.platform.startswith('linux'): + _fields_ = ( + ('d_ino', ctypes.c_ulong), + ('d_off', ctypes.c_long), + ('d_reclen', ctypes.c_ushort), + ('d_type', ctypes.c_byte), + ('d_name', ctypes.c_char * 256), + ) + else: + _fields_ = ( + ('d_ino', ctypes.c_uint32), # must be uint32, not ulong + ('d_reclen', ctypes.c_ushort), + ('d_type', ctypes.c_byte), + ('d_namlen', ctypes.c_byte), + ('d_name', ctypes.c_char * 256), + ) + + DT_UNKNOWN = 0 + DT_DIR = 4 + DT_REG = 8 + DT_LNK = 10 + + Dirent_p = ctypes.POINTER(Dirent) + Dirent_pp = ctypes.POINTER(Dirent_p) + + libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True) + opendir = libc.opendir + opendir.argtypes = [ctypes.c_char_p] + opendir.restype = DIR_p + + readdir_r = libc.readdir_r + readdir_r.argtypes = [DIR_p, Dirent_p, Dirent_pp] + readdir_r.restype = ctypes.c_int + + closedir = libc.closedir + closedir.argtypes = [DIR_p] + closedir.restype = ctypes.c_int + + file_system_encoding = sys.getfilesystemencoding() + + class PosixDirEntry(object): + __slots__ = ('name', '_d_type', '_stat', '_lstat', '_scandir_path', '_path', '_inode') + + def __init__(self, scandir_path, name, d_type, inode): + self._scandir_path = scandir_path + self.name = name + self._d_type = d_type + self._inode = inode + self._stat = None + self._lstat = None + self._path = None + + @property + def path(self): + if self._path is None: + self._path = join(self._scandir_path, self.name) + return self._path + + def stat(self, follow_symlinks=True): + if follow_symlinks: + if self._stat is None: + if self.is_symlink(): + self._stat = stat(self.path) + else: + if self._lstat is None: + self._lstat = lstat(self.path) + self._stat = self._lstat + return self._stat + else: + if self._lstat is None: + self._lstat = lstat(self.path) + return self._lstat + + def is_dir(self, follow_symlinks=True): + if (self._d_type == DT_UNKNOWN or + (follow_symlinks and self.is_symlink())): + try: + st = self.stat(follow_symlinks=follow_symlinks) + except OSError as e: + if e.errno != ENOENT: + 
raise + return False + return st.st_mode & 0o170000 == S_IFDIR + else: + return self._d_type == DT_DIR + + def is_file(self, follow_symlinks=True): + if (self._d_type == DT_UNKNOWN or + (follow_symlinks and self.is_symlink())): + try: + st = self.stat(follow_symlinks=follow_symlinks) + except OSError as e: + if e.errno != ENOENT: + raise + return False + return st.st_mode & 0o170000 == S_IFREG + else: + return self._d_type == DT_REG + + def is_symlink(self): + if self._d_type == DT_UNKNOWN: + try: + st = self.stat(follow_symlinks=False) + except OSError as e: + if e.errno != ENOENT: + raise + return False + return st.st_mode & 0o170000 == S_IFLNK + else: + return self._d_type == DT_LNK + + def inode(self): + return self._inode + + def __str__(self): + return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) + + __repr__ = __str__ + + def posix_error(filename): + errno = ctypes.get_errno() + exc = OSError(errno, strerror(errno)) + exc.filename = filename + return exc + + def scandir_python(path=unicode('.')): + """Like os.listdir(), but yield DirEntry objects instead of returning + a list of names. + """ + if isinstance(path, bytes): + opendir_path = path + is_bytes = True + else: + opendir_path = path.encode(file_system_encoding) + is_bytes = False + dir_p = opendir(opendir_path) + if not dir_p: + raise posix_error(path) + try: + result = Dirent_p() + while True: + entry = Dirent() + if readdir_r(dir_p, entry, result): + raise posix_error(path) + if not result: + break + name = entry.d_name + if name not in (b'.', b'..'): + if not is_bytes: + name = name.decode(file_system_encoding) + yield PosixDirEntry(path, name, entry.d_type, entry.d_ino) + finally: + if closedir(dir_p): + raise posix_error(path) + + if _scandir is not None: + scandir_c = _scandir.scandir + + if _scandir is not None: + scandir = scandir_c + elif ctypes is not None: + scandir = scandir_python + else: + scandir = scandir_generic + + +# Some other system -- no d_type or stat information +else: + scandir = scandir_generic + + +def _walk(top, topdown=True, onerror=None, followlinks=False): + """Like Python 3.5's implementation of os.walk() -- faster than + the pre-Python 3.5 version as it uses scandir() internally. + """ + dirs = [] + nondirs = [] + + # We may not have read permission for top, in which case we can't + # get a list of the files the directory contains. os.walk + # always suppressed the exception then, rather than blow up for a + # minor reason when (say) a thousand readable directories are still + # left to visit. That logic is copied here. + try: + scandir_it = scandir(top) + except OSError as error: + if onerror is not None: + onerror(error) + return + + while True: + try: + try: + entry = next(scandir_it) + except StopIteration: + break + except OSError as error: + if onerror is not None: + onerror(error) + return + + try: + is_dir = entry.is_dir() + except OSError: + # If is_dir() raises an OSError, consider that the entry is not + # a directory, same behaviour than os.path.isdir(). + is_dir = False + + if is_dir: + dirs.append(entry.name) + else: + nondirs.append(entry.name) + + if not topdown and is_dir: + # Bottom-up: recurse into sub-directory, but exclude symlinks to + # directories if followlinks is False + if followlinks: + walk_into = True + else: + try: + is_symlink = entry.is_symlink() + except OSError: + # If is_symlink() raises an OSError, consider that the + # entry is not a symbolic link, same behaviour than + # os.path.islink(). 
+ is_symlink = False + walk_into = not is_symlink + + if walk_into: + for entry in walk(entry.path, topdown, onerror, followlinks): + yield entry + + # Yield before recursion if going top down + if topdown: + yield top, dirs, nondirs + + # Recurse into sub-directories + for name in dirs: + new_path = join(top, name) + # Issue #23605: os.path.islink() is used instead of caching + # entry.is_symlink() result during the loop on os.scandir() because + # the caller can replace the directory entry during the "yield" + # above. + if followlinks or not islink(new_path): + for entry in walk(new_path, topdown, onerror, followlinks): + yield entry + else: + # Yield after recursion if going bottom up + yield top, dirs, nondirs + + +if IS_PY3 or sys.platform != 'win32': + walk = _walk +else: + # Fix for broken unicode handling on Windows on Python 2.x, see: + # https://github.com/benhoyt/scandir/issues/54 + file_system_encoding = sys.getfilesystemencoding() + + def walk(top, topdown=True, onerror=None, followlinks=False): + if isinstance(top, bytes): + top = top.decode(file_system_encoding) + return _walk(top, topdown, onerror, followlinks) diff --git a/medusa/post_processor.py b/medusa/post_processor.py index 7559078618..549af44898 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ -16,7 +16,6 @@ # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . """Post processor module.""" -import fnmatch import os import re import stat @@ -26,6 +25,11 @@ import adba +from pathlib2 import Path + +import rarfile +from rarfile import Error as RarError + from six import text_type from . import app, common, db, failed_history, helpers, history, logger, notifiers, show_name_helpers @@ -158,109 +162,49 @@ def _check_for_existing_file(self, existing_file): (existing_file), logger.DEBUG) return PostProcessor.DOESNT_EXIST - @staticmethod - def _search_files(path, pattern='*', subfolders=None, base_name_only=None, sort=False): - """ - Search for files in a given path. - - :param path: path to file or folder (folder paths must end with slashes) - :type path: text_type - :param pattern: pattern used to match the files - :type pattern: text_type - :param subfolders: search for files in subfolders - :type subfolders: bool - :param base_name_only: only match files with the same name - :type base_name_only: bool - :param sort: return files sorted by size - :type sort: bool - :return: list with found files or empty list - :rtype: list - """ - directory = os.path.dirname(path) - - if base_name_only: - if os.path.isfile(path): - new_pattern = os.path.basename(path).rpartition('.')[0] - elif os.path.isdir(path): - new_pattern = os.path.split(directory)[1] - else: - return [] - - if any(char in new_pattern for char in ['[', '?', '*']): - # Escaping is done by wrapping any of "*?[" between square brackets. 
- # Modified from: https://hg.python.org/cpython/file/tip/Lib/glob.py#l161 - if isinstance(new_pattern, bytes): - new_pattern = re.compile(b'([*?[])').sub(br'[\1]', new_pattern) - else: - new_pattern = re.compile('([*?[])').sub(r'[\1]', new_pattern) - - pattern = new_pattern + pattern - - found_files = [] - for root, __, filenames in os.walk(directory): - for filename in fnmatch.filter(filenames, pattern): - found_files.append(os.path.join(root, filename)) - if not subfolders: - break - - if sort: - found_files = sorted(found_files, key=os.path.getsize, reverse=True) - - return found_files - - def list_associated_files(self, file_path, base_name_only=False, subtitles_only=False, subfolders=False): + def list_associated_files(self, filepath, base_name_only=False, subtitles_only=False, subfolders=False): """ For a given file path search for files in the same directory and return their absolute paths. - :param file_path: The file to check for associated files - :param base_name_only: False add extra '.' (conservative search) to file_path minus extension + :param filepath: The file to check for associated files + :param base_name_only: list only files with the same basename :param subtitles_only: list only subtitles :param subfolders: check subfolders while listing files :return: A list containing all files which are associated to the given file """ + files = self._search_files(filepath, subfolders=subfolders, base_name_only=base_name_only) + # file path to the video file that is being processed (without extension) - processed_file_name = os.path.basename(file_path).rpartition('.')[0].lower() + processed_file_name = os.path.splitext(os.path.basename(filepath))[0].lower() - file_list = self._search_files(file_path, subfolders=subfolders, base_name_only=base_name_only) + processed_names = (processed_file_name,) + processed_names += filter(None, (self._rar_basename(filepath, files),)) # loop through all the files in the folder, and check if they are the same name # even when the cases don't match filelist = [] - rar_file = [os.path.basename(f).rpartition('.')[0].lower() for f in file_list - if helpers.get_extension(f).lower() == 'rar'] - for found_file in file_list: + for found_file in files: file_name = os.path.basename(found_file).lower() - if file_name.startswith(processed_file_name): - - # only add subtitles with valid languages to the list - if is_subtitle(found_file): - code = file_name.rsplit('.', 2)[1].replace('_', '-') - language = from_code(code, unknown='') or from_ietf_code(code, unknown='und') - if not language: - continue - - filelist.append(found_file) - # List associated files based on .RAR files like Show.101.720p-GROUP.nfo and Show.101.720p-GROUP.rar - elif any([file_name.startswith(r) for r in rar_file]): + if file_name.startswith(processed_names): filelist.append(found_file) file_path_list = [] extensions_to_delete = [] for associated_file_path in filelist: # Exclude the video file we are post-processing - if associated_file_path == file_path: - continue - - # Exlude non-subtitle files with the 'only subtitles' option - if subtitles_only and not is_subtitle(associated_file_path): + if associated_file_path == filepath: continue # Exclude .rar files from associated list if re.search(r'(^.+\.(rar|r\d+)$)', associated_file_path): continue + # Exlude non-subtitle files with the 'only subtitles' option + if subtitles_only and not is_subtitle(associated_file_path): + continue + # Add the extensions that the user doesn't allow to the 'extensions_to_delete' list if 
app.MOVE_ASSOCIATED_FILES: allowed_extensions = app.ALLOWED_EXTENSIONS.split(',') @@ -276,17 +220,73 @@ def list_associated_files(self, file_path, base_name_only=False, subtitles_only= if file_path_list: self._log(u'Found the following associated files for {0}: {1}'.format - (file_path, file_path_list), logger.DEBUG) + (filepath, file_path_list), logger.DEBUG) if extensions_to_delete: # Rebuild the 'file_path_list' list only with the extensions the user allows file_path_list = [associated_file for associated_file in file_path_list if associated_file not in extensions_to_delete] self._delete(extensions_to_delete) else: - self._log(u'No associated files for {0} were found during this pass'.format(file_path), logger.DEBUG) + self._log(u'No associated files for {0} were found during this pass'.format(filepath), logger.DEBUG) return file_path_list + @staticmethod + def _search_files(path, pattern='*', subfolders=None, base_name_only=None, sort=None): + """ + Search for files in a given path. + + :param path: path to file or folder (folder paths must end with slashes) + :type path: text_type + :param pattern: pattern used to match the files + :type pattern: text_type + :param subfolders: search for files in subfolders + :type subfolders: bool + :param base_name_only: only match files with the same name + :type base_name_only: bool + :param sort: return files sorted by size + :type sort: bool + :return: list with found files or empty list + :rtype: list + """ + directory = os.path.dirname(path) + + if base_name_only: + if os.path.isfile(path): + new_pattern = os.path.splitext(os.path.basename(path))[0] + elif os.path.isdir(path): + new_pattern = os.path.split(directory)[1] + else: + return [] + + pattern = new_pattern + pattern + + path = Path(directory) + glob = path.rglob(pattern) if subfolders else path.glob(pattern) + + files = [text_type(match) for match in glob] + + if sort: + files = sorted(files, key=os.path.getsize, reverse=True) + + return files + + @staticmethod + def _rar_basename(filepath, files): + """Return the basename of the source rar archive if found.""" + videofile = os.path.basename(filepath) + rars = (x for x in files if rarfile.is_rarfile(x)) + + for rar in rars: + try: + content = rarfile.RarFile(rar).namelist() + except RarError as e: + logger.log(u'An error occurred while reading the following RAR file: {name}. ' + u'Error: {message}'.format(name=rar, message=e), logger.WARNING) + continue + if videofile in content: + return os.path.splitext(os.path.basename(rar))[0] + def _delete(self, file_path, associated_files=False): """ Delete the file and optionally all associated files. @@ -361,38 +361,31 @@ def _combined_file_operation(self, file_path, new_path, new_base_name, associate (file_path), logger.DEBUG) return - # base name with file path (without extension and ending dot) - old_base_name = file_path.rpartition('.')[0] - old_base_name_length = len(old_base_name) - for cur_file_path in file_list: # remember if the extension changed changed_extension = None - # file extension without leading dot (for example: de.srt) - extension = cur_file_path[old_base_name_length + 1:] - # If basename is different, then is a RAR associated file. - if not extension: - helpers.get_extension(cur_file_path) + # file extension without leading dot + extension = helpers.get_extension(cur_file_path) # initally set current extension as new extension new_extension = extension - # split the extension in two parts. 
E.g.: ('de', '.srt') - split_extension = os.path.splitext(extension) - # check if it's a subtitle and also has a subtitle language - if is_subtitle(cur_file_path) and all(split_extension): - sub_lang = split_extension[0].lower() - if sub_lang == 'pt-br': - sub_lang = 'pt-BR' - new_extension = sub_lang + split_extension[1] - changed_extension = True - # If subtitle was downloaded from Medusa it can't be in the torrent folder, so we move it. - # Otherwise when torrent+data gets removed the folder won't be deleted because of subtitle + if is_subtitle(cur_file_path): + # If subtitle was downloaded from Medusa it can't be in the torrent folder, so we move it. + # Otherwise when torrent+data gets removed, the folder won't be deleted because of subtitle if app.POSTPONE_IF_NO_SUBS: - # subtitle_action = move + # subtitle_action = move action = subtitle_action or action + code = cur_file_path.rsplit('.', 2)[1].lower().replace('_', '-') + if from_code(code, unknown='') or from_ietf_code(code, unknown=''): + if code == 'pt-br': + code = 'pt-BR' + + new_extension = code + '.' + extension + changed_extension = True + # replace nfo with nfo-orig to avoid conflicts - if extension == 'nfo' and app.NFO_RENAME: + elif extension == 'nfo' and app.NFO_RENAME: new_extension = 'nfo-orig' changed_extension = True From be5b57e5c32f12d6cc74169779a34230dccc9373 Mon Sep 17 00:00:00 2001 From: X O Date: Thu, 23 Feb 2017 20:17:04 +1030 Subject: [PATCH 031/344] lint js (#2274) --- .gitignore | 4 +- package.json | 33 +- readme.md | 2 +- static/js/add-show-options.js | 5 +- static/js/add-shows/add-existing-show.js | 13 +- static/js/add-shows/init.js | 6 +- static/js/add-shows/new-show.js | 4 +- static/js/ajax-episode-search.js | 1 + static/js/ajax-notifications.js | 10 +- static/js/common/init.js | 7 +- static/js/config-providers.js | 19 +- static/js/config/index.js | 7 +- static/js/core.js | 6 +- static/js/errorlogs/viewlogs.js | 26 +- static/js/history/index.js | 28 +- static/js/home/display-show.js | 2 +- static/js/home/index.js | 8 +- static/js/home/restart.js | 1 + static/js/home/snatch-selection.js | 1 + static/js/manage/backlog-overview.js | 13 +- static/js/mass-update.js | 6 +- static/js/quality-chooser.js | 4 +- static/js/root-dirs.js | 1 + static/js/schedule/index.js | 4 +- views/displayShow.mako | 2 - yarn.lock | 4569 ++++++++++++++++++++++ 26 files changed, 4688 insertions(+), 94 deletions(-) create mode 100644 yarn.lock diff --git a/.gitignore b/.gitignore index bd9757ebbd..12a9e8d9f9 100644 --- a/.gitignore +++ b/.gitignore @@ -67,8 +67,8 @@ lib/unrar2/UnRAR.exe # Grunt # ###################### -.build/bower_components -.build/node_modules +**/bower_components +**/node_modules .build/dist .build/package.json diff --git a/package.json b/package.json index 250138fff5..7a9825d2dd 100644 --- a/package.json +++ b/package.json @@ -26,35 +26,50 @@ "load-grunt-tasks": "^3.3.0", "snyk": "^1.9.1", "stylelint": "^7.2.0", - "xo": "^0.16.0" + "xo": "^0.17.1" }, "xo": { "space": 4, "rules": { - "space-before-function-paren": ["error", "never"] + "space-before-function-paren": [ + "error", + "never" + ] }, "envs": [ "browser", "jquery" ], "globals": [ + "PNotify", + "LazyLoad", + "_", + "log", "MEDUSA", - "PNotify" + "api", + "apiKey", + "apiRoot" ], "ignores": [ "static/js/lib/**", "static/js/*.min.js", - "static/js/vender.js" + "static/js/vender.js", + "static/js/api.js" ] }, "stylelint": { "extends": "./.build/node_modules/stylelint-config-standard", "rules": { - "indentation": [ 4, { - "ignore": ["block"], - "message": 
"Please use 4 spaces for indentation. Tabs make OmgImAlexis sad.", - "severity": "error" - } ] + "indentation": [ + 4, + { + "ignore": [ + "block" + ], + "message": "Please use 4 spaces for indentation. Tabs make OmgImAlexis sad.", + "severity": "error" + } + ] } } } diff --git a/readme.md b/readme.md index 29eca90a3a..a524abfd02 100644 --- a/readme.md +++ b/readme.md @@ -1,6 +1,6 @@ ![Medusa](static/images/medusa-logo.png) -[![Build Status](https://travis-ci.org/pymedusa/Medusa.svg?branch=develop)](https://travis-ci.org/pymedusa/Medusa) [![Average time to resolve an issue](http://isitmaintained.com/badge/resolution/pymedusa/Medusa.svg)](http://isitmaintained.com/project/pymedusa/Medusa "Average time to resolve an issue") [![Percentage of issues still open](http://isitmaintained.com/badge/open/pymedusa/Medusa.svg)](http://isitmaintained.com/project/pymedusa/Medusa "Percentage of issues still open") [![Codacy Badge](https://api.codacy.com/project/badge/Grade/ade58b4469dd4b38bbbd681913d97bfc)](https://www.codacy.com/app/pymedusa/Medusa?utm_source=github.com&utm_medium=referral&utm_content=pymedusa/Medusa&utm_campaign=Badge_Grade) +[![Build Status](https://travis-ci.org/pymedusa/Medusa.svg?branch=develop)](https://travis-ci.org/pymedusa/Medusa) [![Average time to resolve an issue](http://isitmaintained.com/badge/resolution/pymedusa/Medusa.svg)](http://isitmaintained.com/project/pymedusa/Medusa "Average time to resolve an issue") [![Percentage of issues still open](http://isitmaintained.com/badge/open/pymedusa/Medusa.svg)](http://isitmaintained.com/project/pymedusa/Medusa "Percentage of issues still open") [![Codacy Badge](https://api.codacy.com/project/badge/Grade/ade58b4469dd4b38bbbd681913d97bfc)](https://www.codacy.com/app/pymedusa/Medusa?utm_source=github.com&utm_medium=referral&utm_content=pymedusa/Medusa&utm_campaign=Badge_Grade) [![XO code style](https://img.shields.io/badge/code_style-XO-5ed9c7.svg)](https://github.com/sindresorhus/xo) ===== Automatic Video Library Manager for TV Shows. It watches for new episodes of your favorite shows, and when they are posted it does its magic. 
diff --git a/static/js/add-show-options.js b/static/js/add-show-options.js index 886756ec39..fb295eca13 100644 --- a/static/js/add-show-options.js +++ b/static/js/add-show-options.js @@ -9,10 +9,11 @@ $(document).ready(function() { bestQualArray.push($(d).val()); }); + // @TODO: Move this to API $.get('config/general/saveAddShowDefaults', { defaultStatus: $('#statusSelect').val(), - allowed_qualities: anyQualArray.join(','), - preferred_qualities: bestQualArray.join(','), + allowed_qualities: anyQualArray.join(','), // eslint-disable-line camelcase + preferred_qualities: bestQualArray.join(','), // eslint-disable-line camelcase defaultFlattenFolders: $('#flatten_folders').prop('checked'), subtitles: $('#subtitles').prop('checked'), anime: $('#anime').prop('checked'), diff --git a/static/js/add-shows/add-existing-show.js b/static/js/add-shows/add-existing-show.js index 6d538a1e4e..09b7c95757 100644 --- a/static/js/add-shows/add-existing-show.js +++ b/static/js/add-shows/add-existing-show.js @@ -1,8 +1,8 @@ MEDUSA.addShows.addExistingShow = function() { $('#tableDiv').on('click', '#checkAll', function() { - var seasCheck = this; + var seasonCheck = this; $('.dirCheck').each(function() { - this.checked = seasCheck.checked; + this.checked = seasonCheck.checked; }); }); @@ -10,11 +10,10 @@ MEDUSA.addShows.addExistingShow = function() { var dirArr = []; $('.dirCheck').each(function() { if (this.checked === true) { - var show = $(this).attr('id'); - var originalIndexer = $(this).attr('data-indexer') - var indexerId = '|' + $(this).attr('data-indexer-id') - var showName = $(this).attr('data-show-name') - var showDir = $(this).attr('data-show-dir') + var originalIndexer = $(this).attr('data-indexer'); + var indexerId = '|' + $(this).attr('data-indexer-id'); + var showName = $(this).attr('data-show-name'); + var showDir = $(this).attr('data-show-dir'); var indexer = $(this).closest('tr').find('select').val(); if (originalIndexer !== indexer || originalIndexer === '0') { diff --git a/static/js/add-shows/init.js b/static/js/add-shows/init.js index 6958db8216..c5dd59fc95 100644 --- a/static/js/add-shows/init.js +++ b/static/js/add-shows/init.js @@ -61,7 +61,7 @@ MEDUSA.addShows.init = function() { rating: '[data-rating] parseInt', votes: '[data-votes] parseInt' } - }).on('layoutComplete arrangeComplete removeComplete', function () { + }).on('layoutComplete arrangeComplete removeComplete', function() { imgLazyLoad.update(); imgLazyLoad.handleScroll(); }); @@ -157,8 +157,8 @@ MEDUSA.addShows.init = function() { $.get('config/general/saveAddShowDefaults', { defaultStatus: $('#statusSelect').val(), - allowed_qualities: anyQualArray.join(','), - preferred_qualities: bestQualArray.join(','), + allowed_qualities: anyQualArray.join(','), // eslint-disable-line camelcase + preferred_qualities: bestQualArray.join(','), // eslint-disable-line camelcase defaultFlattenFolders: $('#flatten_folders').prop('checked'), subtitles: $('#subtitles').prop('checked'), anime: $('#anime').prop('checked'), diff --git a/static/js/add-shows/new-show.js b/static/js/add-shows/new-show.js index a1d3b73d56..b7490bab50 100644 --- a/static/js/add-shows/new-show.js +++ b/static/js/add-shows/new-show.js @@ -89,7 +89,7 @@ MEDUSA.addShows.newShow = function() { dataType: 'json', error: function() { $('#searchResults').empty().html('search timed out, try again or try another indexer'); - }, + } }).done(function(data) { var firstResult = true; var resultStr = '
    \nSearch Results:\n'; @@ -109,7 +109,7 @@ MEDUSA.addShows.newShow = function() { var whichSeries = obj.join('|'); resultStr += ' '; - if (data.langid && data.langid !== '' && obj[1] === 1) { //For now only add the language id to the tvdb url, as the others might have different routes.) + if (data.langid && data.langid !== '' && obj[1] === 1) { // For now only add the language id to the tvdb url, as the others might have different routes. resultStr += '' + obj[4] + ''; } else { resultStr += '' + obj[4] + ''; diff --git a/static/js/ajax-episode-search.js b/static/js/ajax-episode-search.js index 0e487046c5..40055b3fd1 100644 --- a/static/js/ajax-episode-search.js +++ b/static/js/ajax-episode-search.js @@ -167,6 +167,7 @@ $.ajaxEpSearch = function(options) { url += '&down_cur_quality=1'; } + // @TODO: Move to the API $.getJSON(url, function(data) { // if they failed then just put the red X if (data.result.toLowerCase() === 'failure') { diff --git a/static/js/ajax-notifications.js b/static/js/ajax-notifications.js index 40a1bbf84c..6e64a090a2 100644 --- a/static/js/ajax-notifications.js +++ b/static/js/ajax-notifications.js @@ -1,4 +1,4 @@ -var messageUrl = 'ui/get_messages'; // eslint-disable-line xo/filename-case +var messageUrl = 'ui/get_messages'; var test = !1; var iconUrl = 'images/ico/favicon-120.png'; @@ -19,10 +19,10 @@ function displayPNotify(type, title, message) { new PNotify({ // eslint-disable-line no-new type: type, title: title, - text: message.replace(/]*)?>/ig, '\n') - .replace(/<[\/]?b(?:\s[^>]*)?>/ig, '*') - .replace(/]*)?>/ig, '[').replace(/<[\/]i>/ig, ']') - .replace(/<(?:[\/]?ul|\/li)(?:\s[^>]*)?>/ig, '').replace(/]*)?>/ig, '\n* ') + text: message.replace(/]*)?>/ig, '\n') + .replace(/<[/]?b(?:\s[^>]*)?>/ig, '*') + .replace(/]*)?>/ig, '[').replace(/<[/]i>/ig, ']') + .replace(/<(?:[/]?ul|\/li)(?:\s[^>]*)?>/ig, '').replace(/]*)?>/ig, '\n* ') }); } diff --git a/static/js/common/init.js b/static/js/common/init.js index 2a00c04de6..f6956def1a 100644 --- a/static/js/common/init.js +++ b/static/js/common/init.js @@ -20,7 +20,7 @@ MEDUSA.common.init = function() { } return offset; } - + $(window).resize(function() { $('.backstretch').css('top', backstretchOffset()); }); @@ -43,11 +43,11 @@ MEDUSA.common.init = function() { return rgb; } - // function to convert rgb(0,0,0) into #000000 + // function to convert rgb(0,0,0) into #000000 function rgb2hex(rgb) { rgb = rgb.match(/^rgb\((\d+),\s*(\d+),\s*(\d+)\)$/); function hex(x) { - return ('0' + parseInt(x).toString(16)).slice(-2); + return ('0' + parseInt(x, 10).toString(16)).slice(-2); } return '#' + hex(rgb[1]) + hex(rgb[2]) + hex(rgb[3]); } @@ -56,7 +56,6 @@ MEDUSA.common.init = function() { var allCells = $('.triggerhighlight'); allCells.on('mouseover', function() { var el = $(this); - var pos = el.index(); revertBackgroundColor = rgb2hex($(this).parent().css('background-color')); // fetch the original background-color to revert back to var highlightBackgroundColor = colorLuminance(revertBackgroundColor, -0.15); // change highlight color based on original color el.parent().find('.triggerhighlight').css('background-color', highlightBackgroundColor); // setting highlight background-color diff --git a/static/js/config-providers.js b/static/js/config-providers.js index 8baa6074be..a15b6c891e 100644 --- a/static/js/config-providers.js +++ b/static/js/config-providers.js @@ -85,8 +85,8 @@ $(document).ready(function() { // eslint-disable-line max-lines $(this).makeNewznabProviderString(); }; - $.fn.addTorrentRssProvider = 
function(id, name, url, cookies, title_tag) { // eslint-disable-line max-params - var newData = [name, url, cookies, title_tag]; + $.fn.addTorrentRssProvider = function(id, name, url, cookies, titleTag) { // eslint-disable-line max-params + var newData = [name, url, cookies, titleTag]; torrentRssProviders[id] = newData; $('#editATorrentRssProvider').addOption(id, name); @@ -118,10 +118,10 @@ $(document).ready(function() { // eslint-disable-line max-lines $(this).makeNewznabProviderString(); }; - $.fn.updateTorrentRssProvider = function(id, url, cookies, title_tag) { + $.fn.updateTorrentRssProvider = function(id, url, cookies, titleTag) { torrentRssProviders[id][1] = url; torrentRssProviders[id][2] = cookies; - torrentRssProviders[id][3] = title_tag; + torrentRssProviders[id][3] = titleTag; $(this).populateTorrentRssSection(); $(this).makeTorrentRssProviderString(); }; @@ -382,9 +382,9 @@ $(document).ready(function() { // eslint-disable-line max-lines var url = $('#torrentrss_url').val(); var cookies = $('#torrentrss_cookies').val(); - var title_tag = $('#torrentrss_title_tag').val(); + var titleTag = $('#torrentrss_title_tag').val(); - $(this).updateTorrentRssProvider(selectedProvider, url, cookies, title_tag); + $(this).updateTorrentRssProvider(selectedProvider, url, cookies, titleTag); }); $('body').on('change', '#editAProvider', function() { @@ -476,14 +476,15 @@ $(document).ready(function() { // eslint-disable-line max-lines var name = $('#torrentrss_name').val(); var url = $('#torrentrss_url').val(); var cookies = $('#torrentrss_cookies').val(); - var title_tag = $('#torrentrss_title_tag').val(); + var titleTag = $('#torrentrss_title_tag').val(); var params = { name: name, url: url, cookies: cookies, - title_tag: title_tag + titleTag: titleTag }; + // @TODO: Move to the API // send to the form with ajax, get a return value $.getJSON('config/providers/canAddTorrentRssProvider', params, function(data) { if (data.error !== undefined) { @@ -491,7 +492,7 @@ $(document).ready(function() { // eslint-disable-line max-lines return; } - $(this).addTorrentRssProvider(data.success, name, url, cookies, title_tag); + $(this).addTorrentRssProvider(data.success, name, url, cookies, titleTag); $(this).refreshEditAProvider(); }); }); diff --git a/static/js/config/index.js b/static/js/config/index.js index a8eaf297fe..afcc5e68b3 100644 --- a/static/js/config/index.js +++ b/static/js/config/index.js @@ -4,8 +4,7 @@ MEDUSA.config.index = function() { $('label[for="proxy_indexers"]').hide(); } - - $('#theme_name').on('change', function(){ + $('#theme_name').on('change', function() { api.patch('config', { theme: { name: $(this).val() @@ -13,8 +12,8 @@ MEDUSA.config.index = function() { }).then(function(response) { log.info(response); window.location.reload(); - }).catch(function (error) { - log.info(error); + }).catch(function(err) { + log.error(err); }); }); diff --git a/static/js/core.js b/static/js/core.js index c5cabd7855..ca4a0d89de 100644 --- a/static/js/core.js +++ b/static/js/core.js @@ -33,14 +33,13 @@ var UTIL = { } var body = document.body; - $('[asset]').each(function(){ + $('[asset]').each(function() { let asset = $(this).attr('asset'); let path = apiRoot + 'asset/' + asset + '&api_key=' + apiKey; if (this.tagName.toLowerCase() === 'img') { if ($(this).attr('lazy') === 'on') { $(this).attr('data-original', path); - } - else { + } else { $(this).attr('src', path); } } @@ -93,6 +92,7 @@ if (!document.location.pathname.endsWith('/login/')) { $(document).ready(UTIL.init); } }).catch(function(err) { 
+ log.error(err); alert('Unable to connect to Medusa!'); // eslint-disable-line no-alert }); } diff --git a/static/js/errorlogs/viewlogs.js b/static/js/errorlogs/viewlogs.js index 9c23117bae..ba60c30ec6 100644 --- a/static/js/errorlogs/viewlogs.js +++ b/static/js/errorlogs/viewlogs.js @@ -1,20 +1,18 @@ MEDUSA.errorlogs.viewlogs = function() { - var getParam = function() { - return params = $.param({ - min_level: $('select[name=min_level]').val(), // eslint-disable-line camelcase - log_filter: $('select[name=log_filter]').val(), // eslint-disable-line camelcase - log_period: $('select[name=log_period]').val(), // eslint-disable-line camelcase - log_search: $('#log_search').val() // eslint-disable-line camelcase - }); - } + var params = $.param({ + min_level: $('select[name=min_level]').val(), // eslint-disable-line camelcase + log_filter: $('select[name=log_filter]').val(), // eslint-disable-line camelcase + log_period: $('select[name=log_period]').val(), // eslint-disable-line camelcase + log_search: $('#log_search').val() // eslint-disable-line camelcase + }); - $('#min_level,#log_filter,#log_search,#log_period').on('keyup change', _.debounce(function() { // eslint-disable-line no-undef + $('#min_level,#log_filter,#log_search,#log_period').on('keyup change', _.debounce(function() { $('#min_level').prop('disabled', true); $('#log_filter').prop('disabled', true); $('#log_period').prop('disabled', true); document.body.style.cursor = 'wait'; - $.get('errorlogs/viewlog/?' + getParam(), function(data) { + $.get('errorlogs/viewlog/?' + params, function(data) { history.pushState('data', '', 'errorlogs/viewlog/?' + params); $('pre').html($(data).find('pre').html()); $('#min_level').prop('disabled', false); @@ -25,8 +23,8 @@ MEDUSA.errorlogs.viewlogs = function() { }, 500)); $(document.body).on('click', '#viewlog-text-view', function(e) { - e.preventDefault(); - var win = window.open('errorlogs/viewlog/?' + getParam() + '&text_view=1', '_blank'); - win.focus(); - }) + e.preventDefault(); + var win = window.open('errorlogs/viewlog/?' + params + '&text_view=1', '_blank'); + win.focus(); + }); }; diff --git a/static/js/history/index.js b/static/js/history/index.js index cd2b0bd52d..c505ccd687 100644 --- a/static/js/history/index.js +++ b/static/js/history/index.js @@ -3,23 +3,35 @@ MEDUSA.history.index = function() { widgets: ['saveSort', 'zebra', 'filter'], sortList: [[0, 1]], textExtraction: (function() { - if ($.isMeta({'layout': 'history'}, ['detailed'])) { + if ($.isMeta({layout: 'history'}, ['detailed'])) { return { // 0: Time 1: Episode 2: Action 3: Provider 4: Quality - 0: function(node) { return $(node).find('time').attr('datetime'); }, // Time - 1: function(node) { return $(node).find('a').text(); } // Episode + 0: function(node) { + return $(node).find('time').attr('datetime'); + }, + 1: function(node) { + return $(node).find('a').text(); + } }; } return { // 0: Time 1: Episode 2: Snatched 3: Downloaded 4: Quality - 0: function(node) { return $(node).find('time').attr('datetime'); }, // Time - 1: function(node) { return $(node).find('a').text(); }, // Episode - 2: function(node) { return $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'); }, // Snatched - 3: function(node) { return $(node).find('img').attr('title') === undefined ? 
'' : $(node).find('img').attr('title'); } // Downloaded + 0: function(node) { + return $(node).find('time').attr('datetime'); + }, + 1: function(node) { + return $(node).find('a').text(); + }, // Episode + 2: function(node) { + return $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'); + }, + 3: function(node) { + return $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'); + } }; })(), headers: (function() { - if ($.isMeta({'layout': 'history'}, ['detailed'])) { + if ($.isMeta({layout: 'history'}, ['detailed'])) { return { 0: {sorter: 'realISODate'} }; diff --git a/static/js/home/display-show.js b/static/js/home/display-show.js index 4dc7e8370f..2dcf267043 100644 --- a/static/js/home/display-show.js +++ b/static/js/home/display-show.js @@ -418,7 +418,7 @@ MEDUSA.home.displayShow = function() { // eslint-disable-line max-lines api.patch('config', { layout: { show: { - specials: $(this).text() === 'Hide' ? false : true + specials: $(this).text() !== 'Hide' } } }).then(function(response) { diff --git a/static/js/home/index.js b/static/js/home/index.js index ba014a4f42..99762a9563 100644 --- a/static/js/home/index.js +++ b/static/js/home/index.js @@ -188,11 +188,9 @@ MEDUSA.home.index = function() { }, sortStable: true, sortAppend: [[2, 0]] - }) - .bind('sortEnd',function(e, t){ + }).bind('sortEnd', function() { imgLazyLoad.handleScroll(); - }) - .bind('filterEnd',function(e, t){ + }).bind('filterEnd', function() { imgLazyLoad.handleScroll(); }); @@ -228,7 +226,7 @@ MEDUSA.home.index = function() { return (indexer.length && parseInt(indexer, 10)) || Number.NEGATIVE_INFINITY; } } - }).on('layoutComplete arrangeComplete removeComplete', function () { + }).on('layoutComplete arrangeComplete removeComplete', function() { imgLazyLoad.update(); imgLazyLoad.handleScroll(); }); diff --git a/static/js/home/restart.js b/static/js/home/restart.js index 5840356722..7bf39d2bef 100644 --- a/static/js/home/restart.js +++ b/static/js/home/restart.js @@ -2,6 +2,7 @@ MEDUSA.home.restart = function() { var currentPid = $('.messages').attr('current-pid'); var defaultPage = $('.messages').attr('default-page'); var checkIsAlive = setInterval(function() { + // @TODO: Move to API $.get('home/is_alive/', function(data) { if (data.msg.toLowerCase() === 'nope') { // if it's still initializing then just wait and try again diff --git a/static/js/home/snatch-selection.js b/static/js/home/snatch-selection.js index e85f869f7e..0737e43b9c 100644 --- a/static/js/home/snatch-selection.js +++ b/static/js/home/snatch-selection.js @@ -137,6 +137,7 @@ MEDUSA.home.snatchSelection = function() { }, timeout: 15000 // timeout after 15s }).done(function(data) { + // @TODO: Combine the lower if statements if (data.result === 'refresh') { self.refreshResults(); updateSpinner('Refreshed results...', true); diff --git a/static/js/manage/backlog-overview.js b/static/js/manage/backlog-overview.js index 3b5c262798..9d362af68e 100644 --- a/static/js/manage/backlog-overview.js +++ b/static/js/manage/backlog-overview.js @@ -1,5 +1,4 @@ MEDUSA.manage.backlogOverview = function() { - checkForcedSearch(); function checkForcedSearch() { @@ -43,14 +42,14 @@ MEDUSA.manage.backlogOverview = function() { if (episodeStatus.indexOf('snatched') >= 0) { img.prop('src', 'images/yes16.png'); setTimeout(function() { - img.parent().parent().parent().remove() - }, 3000) + img.parent().parent().parent().remove(); + }, 3000); } else { img.prop('src', 'images/search16.png'); } } } - }); + }); 
 }
 
 $('#pickShow').on('change', function() {
@@ -60,7 +59,7 @@ MEDUSA.manage.backlogOverview = function() {
         }
     });
 
-    $('.forceBacklog').on('click', function(){
+    $('.forceBacklog').on('click', function() {
         $.get($(this).attr('href'));
         $(this).text('Searching...');
         return false;
@@ -78,8 +77,8 @@ MEDUSA.manage.backlogOverview = function() {
             if (data.result.toLowerCase() === 'success') {
                 img.prop('src', 'images/yes16.png');
                 setTimeout(function() {
-                    img.parent().parent().parent().remove()
-                }, 3000)
+                    img.parent().parent().parent().remove();
+                }, 3000);
             } else {
                 img.prop('src', 'images/no16.png');
             }
diff --git a/static/js/mass-update.js b/static/js/mass-update.js
index f2847e54ce..346c826349 100644
--- a/static/js/mass-update.js
+++ b/static/js/mass-update.js
@@ -62,6 +62,8 @@ $(document).ready(function() {
         }
     });
 
+    var totalCount = [].concat.apply([], [updateArr, refreshArr, renameArr, subtitleArr, deleteArr, removeArr, metadataArr]).length; // eslint-disable-line no-useless-call
+
     if (deleteCount >= 1) {
         $.confirm({
             title: 'Delete Shows',
@@ -76,7 +78,7 @@ $(document).ready(function() {
                     deleteArr.push($(this).attr('id').split('-')[1]);
                 }
             });
-            if (updateArr.length + refreshArr.length + renameArr.length + subtitleArr.length + deleteArr.length + removeArr.length + metadataArr.length === 0) {
+            if (totalCount === 0) {
                 return false;
             }
             var params = $.param({
@@ -93,7 +95,7 @@ $(document).ready(function() {
            }
        });
    }
-    if (updateArr.length + refreshArr.length + renameArr.length + subtitleArr.length + deleteArr.length + removeArr.length + metadataArr.length === 0) {
+    if (totalCount === 0) {
        return false;
    }
    var params = $.param({
diff --git a/static/js/quality-chooser.js b/static/js/quality-chooser.js
index d435284e82..0f718b41de 100644
--- a/static/js/quality-chooser.js
+++ b/static/js/quality-chooser.js
@@ -107,10 +107,10 @@ $(document).ready(function() {
     $('#preferred_text2').hide();
     $('#quality_explanation').show();
 
-    if (preferred.length) {
+    if (preferred.length >= 1) {
         $('#preferred_text1').show();
         $('#preferred_text2').show();
-    } else if (allowed.length) {
+    } else if (allowed.length >= 1) {
         $('#allowed_text').show();
     } else {
         $('#quality_explanation').hide();
diff --git a/static/js/root-dirs.js b/static/js/root-dirs.js
index 5e6ed14b27..3db59b02e7 100644
--- a/static/js/root-dirs.js
+++ b/static/js/root-dirs.js
@@ -1,3 +1,4 @@
+// @TODO: Remove this when we fully drop support for IE > 8
 // Avoid `console` errors in browsers that lack a console.
 (function() { // eslint-disable-line wrap-iife
     var method;
diff --git a/static/js/schedule/index.js b/static/js/schedule/index.js
index 7b0600270a..04695a0008 100644
--- a/static/js/schedule/index.js
+++ b/static/js/schedule/index.js
@@ -1,5 +1,5 @@
 MEDUSA.schedule.index = function() {
-    if ($.isMeta({'layout': 'schedule'}, ['list'])) {
+    if ($.isMeta({layout: 'schedule'}, ['list'])) {
         var sortCodes = {
             date: 0,
             show: 2,
@@ -37,7 +37,7 @@ MEDUSA.schedule.index = function() {
         $.ajaxEpSearch();
     }
 
-    if ($.isMeta({'layout': 'schedule'}, ['banner', 'poster'])) {
+    if ($.isMeta({layout: 'schedule'}, ['banner', 'poster'])) {
         $.ajaxEpSearch({
             size: 16,
             loadingImage: 'loading16' + MEDUSA.config.themeSpinner + '.gif'
diff --git a/views/displayShow.mako b/views/displayShow.mako
index d1a2000094..93b6ad0867 100644
--- a/views/displayShow.mako
+++ b/views/displayShow.mako
@@ -96,7 +96,6 @@
    ${"Season " + str(epResult["season"]) if int(epResult["season"]) > 0 else "Specials"} - % if not any([i for i in sql_results if epResult['season'] == i['season'] and int(i['status']) == 1]): search % endif @@ -145,7 +144,6 @@ search % endif
    -
    % if not app.DISPLAY_ALL_SEASONS: diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 0000000000..6c9f2658cb --- /dev/null +++ b/yarn.lock @@ -0,0 +1,4569 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +JSONStream@^0.8.4: + version "0.8.4" + resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-0.8.4.tgz#91657dfe6ff857483066132b4618b62e8f4887bd" + dependencies: + jsonparse "0.0.5" + through ">=2.2.7 <3" + +abbrev@1, abbrev@^1.0.7: + version "1.1.0" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.0.tgz#d0554c2256636e2f56e7c2e5ad183f859428d81f" + +abbrev@~1.0.4: + version "1.0.9" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" + +acorn-jsx@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-3.0.1.tgz#afdf9488fb1ecefc8348f6fb22f464e32a58b36b" + dependencies: + acorn "^3.0.4" + +acorn@4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.4.tgz#17a8d6a7a6c4ef538b814ec9abac2779293bf30a" + +acorn@^3.0.4, acorn@^3.1.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" + +ajv-keywords@^1.0.0: + version "1.5.1" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-1.5.1.tgz#314dd0a4b3368fad3dfcdc54ede6171b886daf3c" + +ajv@^4.7.0: + version "4.11.3" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.3.tgz#ce30bdb90d1254f762c75af915fb3a63e7183d22" + dependencies: + co "^4.6.0" + json-stable-stringify "^1.0.1" + +align-text@^0.1.1, align-text@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" + dependencies: + kind-of "^3.0.2" + longest "^1.0.1" + repeat-string "^1.5.2" + +amdefine@>=0.0.4: + version "1.0.1" + resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" + +ansi-align@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-1.1.0.tgz#2f0c1658829739add5ebb15e6b0c6e3423f016ba" + dependencies: + string-width "^1.0.1" + +ansi-escapes@^1.1.0, ansi-escapes@^1.3.0, ansi-escapes@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-1.4.0.tgz#d3a8a83b319aa67793662b13e761c7911422306e" + +ansi-regex@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-0.1.0.tgz#55ca60db6900857c423ae9297980026f941ed903" + +ansi-regex@^0.2.0, ansi-regex@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-0.2.1.tgz#0d8e946967a3d8143f93e24e298525fc1b2235f9" + +ansi-regex@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-1.1.1.tgz#41c847194646375e6a1a5d10c3ca054ef9fc980d" + +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + +ansi-styles@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-1.1.0.tgz#eaecbf66cd706882760b2f4691582b8f55d7a7de" + +ansi-styles@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + +ansicolors@^0.3.2: + version "0.3.2" + resolved 
"https://registry.yarnpkg.com/ansicolors/-/ansicolors-0.3.2.tgz#665597de86a9ffe3aa9bfbe6cae5c6ea426b4979" + +archy@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/archy/-/archy-0.0.2.tgz#910f43bf66141fc335564597abc189df44b3d35e" + +archy@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/archy/-/archy-1.0.0.tgz#f9c8c13757cc1dd7bc379ac77b2c62a5c2868c40" + +argparse@^1.0.7: + version "1.0.9" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.9.tgz#73d83bc263f86e97f8cc4f6bae1b0e90a7d22c86" + dependencies: + sprintf-js "~1.0.2" + +"argparse@~ 0.1.11": + version "0.1.16" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-0.1.16.tgz#cfd01e0fbba3d6caed049fbd758d40f65196f57c" + dependencies: + underscore "~1.7.0" + underscore.string "~2.4.0" + +arr-diff@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-2.0.0.tgz#8f3b827f955a8bd669697e4a4256ac3ceae356cf" + dependencies: + arr-flatten "^1.0.1" + +arr-flatten@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.0.1.tgz#e5ffe54d45e19f32f216e91eb99c8ce892bb604b" + +array-differ@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-1.0.0.tgz#eff52e3758249d33be402b8bb8e564bb2b5d4031" + +array-filter@~0.0.0: + version "0.0.1" + resolved "https://registry.yarnpkg.com/array-filter/-/array-filter-0.0.1.tgz#7da8cf2e26628ed732803581fd21f67cacd2eeec" + +array-find-index@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" + +array-map@~0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/array-map/-/array-map-0.0.0.tgz#88a2bab73d1cf7bcd5c1b118a003f66f665fa662" + +array-reduce@~0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/array-reduce/-/array-reduce-0.0.0.tgz#173899d3ffd1c7d9383e4479525dbe278cab5f2b" + +array-union@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" + dependencies: + array-uniq "^1.0.1" + +array-uniq@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" + +array-unique@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53" + +arrify@^1.0.0, arrify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + +asap@~2.0.3: + version "2.0.5" + resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.5.tgz#522765b50c3510490e52d7dcfe085ef9ba96958f" + +asn1@0.1.11: + version "0.1.11" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.1.11.tgz#559be18376d08a4ec4dbe80877d27818639b2df7" + +asn1@~0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.3.tgz#dac8787713c9966849fc8180777ebe9c1ddf3b86" + +assert-plus@^0.1.5: + version "0.1.5" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.1.5.tgz#ee74009413002d84cec7219c6ac811812e723160" + +assert-plus@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234" + +assert-plus@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + 
+async@^0.9.0, async@~0.9.0: + version "0.9.2" + resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d" + +async@^1.5.2, async@~1.5, async@~1.5.2: + version "1.5.2" + resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" + +async@~0.1.22: + version "0.1.22" + resolved "https://registry.yarnpkg.com/async/-/async-0.1.22.tgz#0fc1aaa088a0e3ef0ebe2d8831bab0dcf8845061" + +async@~0.2.6, async@~0.2.8: + version "0.2.10" + resolved "https://registry.yarnpkg.com/async/-/async-0.2.10.tgz#b6bbe0b0674b9d719708ca38de8c237cb526c3d1" + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + +autoprefixer@^6.0.0: + version "6.7.5" + resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-6.7.5.tgz#50848f39dc08730091d9495023487e7cc21f518d" + dependencies: + browserslist "^1.7.5" + caniuse-db "^1.0.30000624" + normalize-range "^0.1.2" + num2fraction "^1.2.2" + postcss "^5.2.15" + postcss-value-parser "^3.2.3" + +aws-sign2@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.5.0.tgz#c57103f7a17fc037f02d7c2e64b602ea223f7d63" + +aws-sign2@~0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f" + +aws4@^1.2.1: + version "1.6.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e" + +babel-code-frame@^6.16.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.22.0.tgz#027620bee567a88c32561574e7fd0801d33118e4" + dependencies: + chalk "^1.1.0" + esutils "^2.0.2" + js-tokens "^3.0.0" + +balanced-match@^0.4.0, balanced-match@^0.4.1: + version "0.4.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-0.4.2.tgz#cb3f3e3c732dc0f01ee70b403f302e61d7709838" + +bcrypt-pbkdf@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d" + dependencies: + tweetnacl "^0.14.3" + +binary@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/binary/-/binary-0.3.0.tgz#9f60553bc5ce8c3386f3b553cff47462adecaa79" + dependencies: + buffers "~0.1.1" + chainsaw "~0.1.0" + +bl@^0.9.0, bl@~0.9.0: + version "0.9.5" + resolved "https://registry.yarnpkg.com/bl/-/bl-0.9.5.tgz#c06b797af085ea00bc527afc8efcf11de2232054" + dependencies: + readable-stream "~1.0.26" + +boom@0.4.x: + version "0.4.2" + resolved "https://registry.yarnpkg.com/boom/-/boom-0.4.2.tgz#7a636e9ded4efcefb19cef4947a3c67dfaee911b" + dependencies: + hoek "0.9.x" + +boom@2.x.x: + version "2.10.1" + resolved "https://registry.yarnpkg.com/boom/-/boom-2.10.1.tgz#39c8918ceff5799f83f9492a848f625add0c766f" + dependencies: + hoek "2.x.x" + +bower-config@~0.5.0, bower-config@~0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/bower-config/-/bower-config-0.5.2.tgz#1f7d2e899e99b70c29a613e70d4c64590414b22e" + dependencies: + graceful-fs "~2.0.0" + mout "~0.9.0" + optimist "~0.6.0" + osenv "0.0.3" + +bower-endpoint-parser@~0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/bower-endpoint-parser/-/bower-endpoint-parser-0.2.2.tgz#00b565adbfab6f2d35addde977e97962acbcb3f6" + +bower-json@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/bower-json/-/bower-json-0.4.0.tgz#a99c3ccf416ef0590ed0ded252c760f1c6d93766" + 
dependencies: + deep-extend "~0.2.5" + graceful-fs "~2.0.0" + intersect "~0.0.3" + +bower-logger@~0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/bower-logger/-/bower-logger-0.2.2.tgz#39be07e979b2fc8e03a94634205ed9422373d381" + +bower-registry-client@~0.2.0: + version "0.2.4" + resolved "https://registry.yarnpkg.com/bower-registry-client/-/bower-registry-client-0.2.4.tgz#269fc7e898b627fb939d1144a593254d7fbbeebc" + dependencies: + async "~0.2.8" + bower-config "~0.5.0" + graceful-fs "~2.0.0" + lru-cache "~2.3.0" + mkdirp "~0.3.5" + request "~2.51.0" + request-replay "~0.2.0" + rimraf "~2.2.0" + +bower@~1.3.0: + version "1.3.12" + resolved "https://registry.yarnpkg.com/bower/-/bower-1.3.12.tgz#37de0edb3904baf90aee13384a1a379a05ee214c" + dependencies: + abbrev "~1.0.4" + archy "0.0.2" + bower-config "~0.5.2" + bower-endpoint-parser "~0.2.2" + bower-json "~0.4.0" + bower-logger "~0.2.2" + bower-registry-client "~0.2.0" + cardinal "0.4.0" + chalk "0.5.0" + chmodr "0.1.0" + decompress-zip "0.0.8" + fstream "~1.0.2" + fstream-ignore "~1.0.1" + glob "~4.0.2" + graceful-fs "~3.0.1" + handlebars "~2.0.0" + inquirer "0.7.1" + insight "0.4.3" + is-root "~1.0.0" + junk "~1.0.0" + lockfile "~1.0.0" + lru-cache "~2.5.0" + mkdirp "0.5.0" + mout "~0.9.0" + nopt "~3.0.0" + opn "~1.0.0" + osenv "0.1.0" + p-throttler "0.1.0" + promptly "0.2.0" + q "~1.0.1" + request "~2.42.0" + request-progress "0.3.0" + retry "0.6.0" + rimraf "~2.2.0" + semver "~2.3.0" + shell-quote "~1.4.1" + stringify-object "~1.0.0" + tar-fs "0.5.2" + tmp "0.0.23" + update-notifier "0.2.0" + which "~1.0.5" + +bower@~1.7.7: + version "1.7.9" + resolved "https://registry.yarnpkg.com/bower/-/bower-1.7.9.tgz#b7296c2393e0d75edaa6ca39648132dd255812b0" + +boxen@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/boxen/-/boxen-0.3.1.tgz#a7d898243ae622f7abb6bb604d740a76c6a5461b" + dependencies: + chalk "^1.1.1" + filled-array "^1.0.0" + object-assign "^4.0.1" + repeating "^2.0.0" + string-width "^1.0.1" + widest-line "^1.0.0" + +boxen@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/boxen/-/boxen-0.6.0.tgz#8364d4248ac34ff0ef1b2f2bf49a6c60ce0d81b6" + dependencies: + ansi-align "^1.1.0" + camelcase "^2.1.0" + chalk "^1.1.1" + cli-boxes "^1.0.0" + filled-array "^1.0.0" + object-assign "^4.0.1" + repeating "^2.0.0" + string-width "^1.0.1" + widest-line "^1.0.0" + +brace-expansion@^1.0.0: + version "1.1.6" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.6.tgz#7197d7eaa9b87e648390ea61fc66c84427420df9" + dependencies: + balanced-match "^0.4.1" + concat-map "0.0.1" + +braces@^1.8.2: + version "1.8.5" + resolved "https://registry.yarnpkg.com/braces/-/braces-1.8.5.tgz#ba77962e12dff969d6b76711e914b737857bf6a7" + dependencies: + expand-range "^1.8.1" + preserve "^0.2.0" + repeat-element "^1.1.2" + +browserify-zlib@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.1.4.tgz#bb35f8a519f600e0fa6b8485241c979d0141fb2d" + dependencies: + pako "~0.2.0" + +browserslist@^1.1.1, browserslist@^1.1.3, browserslist@^1.7.5: + version "1.7.5" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-1.7.5.tgz#eca4713897b51e444283241facf3985de49a9e2b" + dependencies: + caniuse-db "^1.0.30000624" + electron-to-chromium "^1.2.3" + +buf-compare@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/buf-compare/-/buf-compare-1.0.1.tgz#fef28da8b8113a0a0db4430b0b6467b69730b34a" + +buffer-shims@^1.0.0: + version "1.0.0" + 
resolved "https://registry.yarnpkg.com/buffer-shims/-/buffer-shims-1.0.0.tgz#9978ce317388c649ad8793028c3477ef044a8b51" + +buffers@~0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/buffers/-/buffers-0.1.1.tgz#b24579c3bed4d6d396aeee6d9a8ae7f5482ab7bb" + +builtin-modules@^1.0.0, builtin-modules@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" + +builtins@0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/builtins/-/builtins-0.0.7.tgz#355219cd6cf18dbe7c01cc7fd2dce765cfdc549a" + +caller-path@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-0.1.0.tgz#94085ef63581ecd3daa92444a8fe94e82577751f" + dependencies: + callsites "^0.2.0" + +callsites@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-0.2.0.tgz#afab96262910a7f33c19a5775825c69f34e350ca" + +camelcase-keys@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" + dependencies: + camelcase "^2.0.0" + map-obj "^1.0.0" + +camelcase@^1.0.2: + version "1.2.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" + +camelcase@^2.0.0, camelcase@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" + +camelcase@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a" + +caniuse-db@^1.0.30000187, caniuse-db@^1.0.30000624: + version "1.0.30000625" + resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000625.tgz#bfb077a7743ee076aa37ac9a4b7c7a369fc522cf" + +capture-stack-trace@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/capture-stack-trace/-/capture-stack-trace-1.0.0.tgz#4a6fa07399c26bba47f0b2496b4d0fb408c5550d" + +cardinal@0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/cardinal/-/cardinal-0.4.0.tgz#7d10aafb20837bde043c45e43a0c8c28cdaae45e" + dependencies: + redeyed "~0.4.0" + +caseless@~0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.11.0.tgz#715b96ea9841593cc33067923f5ec60ebda4f7d7" + +caseless@~0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.6.0.tgz#8167c1ab8397fb5bb95f96d28e5a81c50f247ac4" + +caseless@~0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.8.0.tgz#5bca2881d41437f54b2407ebe34888c7b9ad4f7d" + +center-align@^0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" + dependencies: + align-text "^0.1.3" + lazy-cache "^1.0.3" + +chainsaw@~0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/chainsaw/-/chainsaw-0.1.0.tgz#5eab50b28afe58074d0d58291388828b5e5fbc98" + dependencies: + traverse ">=0.3.0 <0.4" + +chalk@0.5.0, chalk@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.5.0.tgz#375dfccbc21c0a60a8b61bc5b78f3dc2a55c212f" + dependencies: + ansi-styles "^1.1.0" + escape-string-regexp "^1.0.0" + has-ansi "^0.1.0" + strip-ansi "^0.3.0" + supports-color "^0.2.0" + +chalk@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.5.1.tgz#663b3a648b68b55d04690d49167aa837858f2174" + 
dependencies: + ansi-styles "^1.1.0" + escape-string-regexp "^1.0.0" + has-ansi "^0.1.0" + strip-ansi "^0.3.0" + supports-color "^0.2.0" + +chalk@^1.0.0, chalk@^1.1.0, chalk@^1.1.1, chalk@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + dependencies: + ansi-styles "^2.2.1" + escape-string-regexp "^1.0.2" + has-ansi "^2.0.0" + strip-ansi "^3.0.0" + supports-color "^2.0.0" + +chmodr@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/chmodr/-/chmodr-0.1.0.tgz#e09215a1d51542db2a2576969765bcf6125583eb" + +circular-json@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/circular-json/-/circular-json-0.3.1.tgz#be8b36aefccde8b3ca7aa2d6afc07a37242c0d2d" + +clean-css@~3.4.2: + version "3.4.25" + resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-3.4.25.tgz#9e9a52d5c1e6bc5123e1b2783fa65fe958946ede" + dependencies: + commander "2.8.x" + source-map "0.4.x" + +cli-boxes@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-1.0.0.tgz#4fa917c3e59c94a004cd61f8ee509da651687143" + +cli-color@~0.3.2: + version "0.3.3" + resolved "https://registry.yarnpkg.com/cli-color/-/cli-color-0.3.3.tgz#12d5bdd158ff8a0b0db401198913c03df069f6f5" + dependencies: + d "~0.1.1" + es5-ext "~0.10.6" + memoizee "~0.3.8" + timers-ext "0.1" + +cli-cursor@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-1.0.2.tgz#64da3f7d56a54412e59794bd62dc35295e8f2987" + dependencies: + restore-cursor "^1.0.1" + +cli-width@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.1.0.tgz#b234ca209b29ef66fc518d9b98d5847b00edf00a" + +clite@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/clite/-/clite-0.3.0.tgz#e7fcbc8cc5bd3e7f8b84ed48db12e9474cc73441" + dependencies: + abbrev "^1.0.7" + debug "^2.2.0" + es6-promise "^3.1.2" + lodash.defaults "^4.0.1" + lodash.defaultsdeep "^4.3.1" + lodash.mergewith "^4.3.1" + then-fs "^2.0.0" + update-notifier "^0.6.0" + yargs "^4.3.2" + +cliui@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" + dependencies: + center-align "^0.1.1" + right-align "^0.1.1" + wordwrap "0.0.2" + +cliui@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d" + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + wrap-ansi "^2.0.0" + +clone-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/clone-regexp/-/clone-regexp-1.0.0.tgz#eae0a2413f55c0942f818c229fefce845d7f3b1c" + dependencies: + is-regexp "^1.0.0" + is-supported-regexp-flag "^1.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + +code-point-at@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + +coffee-script@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/coffee-script/-/coffee-script-1.3.3.tgz#150d6b4cb522894369efed6a2101c20bc7f4a4f4" + +color-diff@^0.1.3: + version "0.1.7" + resolved "https://registry.yarnpkg.com/color-diff/-/color-diff-0.1.7.tgz#6db78cd9482a8e459d40821eaf4b503283dcb8e2" + +colorguard@^1.2.0: + version "1.2.0" + resolved 
"https://registry.yarnpkg.com/colorguard/-/colorguard-1.2.0.tgz#f3facaf5caaeba4ef54653d9fb25bb73177c0d84" + dependencies: + chalk "^1.1.1" + color-diff "^0.1.3" + log-symbols "^1.0.2" + object-assign "^4.0.1" + pipetteur "^2.0.0" + plur "^2.0.0" + postcss "^5.0.4" + postcss-reporter "^1.2.1" + text-table "^0.2.0" + yargs "^1.2.6" + +colors@~0.6.0-1, colors@~0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/colors/-/colors-0.6.2.tgz#2423fe6678ac0c5dae8852e5d0e5be08c997abcc" + +combined-stream@^1.0.5, combined-stream@~1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.5.tgz#938370a57b4a51dea2c77c15d5c5fdf895164009" + dependencies: + delayed-stream "~1.0.0" + +combined-stream@~0.0.4, combined-stream@~0.0.5: + version "0.0.7" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-0.0.7.tgz#0137e657baa5a7541c57ac37ac5fc07d73b4dc1f" + dependencies: + delayed-stream "0.0.5" + +commander@2.8.x: + version "2.8.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.8.1.tgz#06be367febfda0c330aa1e2a072d3dc9762425d4" + dependencies: + graceful-readlink ">= 1.0.0" + +commander@^2.9.0: + version "2.9.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.9.0.tgz#9c99094176e12240cb22d6c5146098400fe0f7d4" + dependencies: + graceful-readlink ">= 1.0.0" + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + +concat-stream@^1.4.1, concat-stream@^1.4.6: + version "1.6.0" + resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.0.tgz#0aac662fd52be78964d5532f694784e70110acf7" + dependencies: + inherits "^2.0.3" + readable-stream "^2.2.2" + typedarray "^0.0.6" + +config-chain@~1.1.8: + version "1.1.11" + resolved "https://registry.yarnpkg.com/config-chain/-/config-chain-1.1.11.tgz#aba09747dfbe4c3e70e766a6e41586e1859fc6f2" + dependencies: + ini "^1.3.4" + proto-list "~1.2.1" + +configstore@^0.3.0, configstore@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/configstore/-/configstore-0.3.2.tgz#25e4c16c3768abf75c5a65bc61761f495055b459" + dependencies: + graceful-fs "^3.0.1" + js-yaml "^3.1.0" + mkdirp "^0.5.0" + object-assign "^2.0.0" + osenv "^0.1.0" + user-home "^1.0.0" + uuid "^2.0.1" + xdg-basedir "^1.0.0" + +configstore@^1.0.0, configstore@^1.2.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/configstore/-/configstore-1.4.0.tgz#c35781d0501d268c25c54b8b17f6240e8a4fb021" + dependencies: + graceful-fs "^4.1.2" + mkdirp "^0.5.0" + object-assign "^4.0.1" + os-tmpdir "^1.0.0" + osenv "^0.1.0" + uuid "^2.0.1" + write-file-atomic "^1.1.2" + xdg-basedir "^2.0.0" + +configstore@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/configstore/-/configstore-2.1.0.tgz#737a3a7036e9886102aa6099e47bb33ab1aba1a1" + dependencies: + dot-prop "^3.0.0" + graceful-fs "^4.1.2" + mkdirp "^0.5.0" + object-assign "^4.0.1" + os-tmpdir "^1.0.0" + osenv "^0.1.0" + uuid "^2.0.1" + write-file-atomic "^1.1.2" + xdg-basedir "^2.0.0" + +contains-path@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a" + +core-assert@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/core-assert/-/core-assert-0.2.1.tgz#f85e2cf9bfed28f773cc8b3fa5c5b69bdc02fe3f" + dependencies: + buf-compare "^1.0.0" + is-error "^2.2.0" + +core-js@^2.0.0: + version "2.4.1" + resolved 
"https://registry.yarnpkg.com/core-js/-/core-js-2.4.1.tgz#4de911e667b0eae9124e34254b53aea6fc618d3e" + +core-util-is@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + +cosmiconfig@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-2.1.1.tgz#817f2c2039347a1e9bf7d090c0923e53f749ca82" + dependencies: + js-yaml "^3.4.3" + minimist "^1.2.0" + object-assign "^4.1.0" + os-homedir "^1.0.1" + parse-json "^2.2.0" + require-from-string "^1.1.0" + +create-error-class@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/create-error-class/-/create-error-class-3.0.2.tgz#06be7abef947a3f14a30fd610671d401bca8b7b6" + dependencies: + capture-stack-trace "^1.0.0" + +cross-spawn@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-4.0.2.tgz#7b9247621c23adfdd3856004a823cbe397424d41" + dependencies: + lru-cache "^4.0.1" + which "^1.2.9" + +cryptiles@0.2.x: + version "0.2.2" + resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-0.2.2.tgz#ed91ff1f17ad13d3748288594f8a48a0d26f325c" + dependencies: + boom "0.4.x" + +cryptiles@2.x.x: + version "2.0.5" + resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8" + dependencies: + boom "2.x.x" + +css-color-names@0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-0.0.3.tgz#de0cef16f4d8aa8222a320d5b6d7e9bbada7b9f6" + +css-rule-stream@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/css-rule-stream/-/css-rule-stream-1.1.0.tgz#3786e7198983d965a26e31957e09078cbb7705a2" + dependencies: + css-tokenize "^1.0.1" + duplexer2 "0.0.2" + ldjson-stream "^1.2.1" + through2 "^0.6.3" + +css-tokenize@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/css-tokenize/-/css-tokenize-1.0.1.tgz#4625cb1eda21c143858b7f81d6803c1d26fc14be" + dependencies: + inherits "^2.0.1" + readable-stream "^1.0.33" + +ctype@0.5.3: + version "0.5.3" + resolved "https://registry.yarnpkg.com/ctype/-/ctype-0.5.3.tgz#82c18c2461f74114ef16c135224ad0b9144ca12f" + +currently-unhandled@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" + dependencies: + array-find-index "^1.0.1" + +d@^0.1.1, d@~0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/d/-/d-0.1.1.tgz#da184c535d18d8ee7ba2aa229b914009fae11309" + dependencies: + es5-ext "~0.10.2" + +dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + dependencies: + assert-plus "^1.0.0" + +dateformat@1.0.2-1.2.3: + version "1.0.2-1.2.3" + resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.2-1.2.3.tgz#b0220c02de98617433b72851cf47de3df2cdbee9" + +debug@2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.2.0.tgz#f87057e995b1a1f6ae6a4960664137bc56f039da" + dependencies: + ms "0.7.1" + +debug@^2.1.1, debug@^2.2.0: + version "2.6.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.1.tgz#79855090ba2c4e3115cc7d8769491d58f0491351" + dependencies: + ms "0.7.2" + +decamelize@^1.0.0, decamelize@^1.1.1, decamelize@^1.1.2: + version "1.2.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + +decompress-zip@0.0.8: + 
version "0.0.8" + resolved "https://registry.yarnpkg.com/decompress-zip/-/decompress-zip-0.0.8.tgz#4a265b22c7b209d7b24fa66f2b2dfbced59044f3" + dependencies: + binary "~0.3.0" + graceful-fs "~3.0.0" + mkpath "~0.1.0" + nopt "~2.2.0" + q "~1.0.0" + readable-stream "~1.1.8" + touch "0.0.2" + +deep-assign@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/deep-assign/-/deep-assign-1.0.0.tgz#b092743be8427dc621ea0067cdec7e70dd19f37b" + dependencies: + is-obj "^1.0.0" + +deep-extend@~0.2.5: + version "0.2.11" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.2.11.tgz#7a16ba69729132340506170494bc83f7076fe08f" + +deep-extend@~0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.1.tgz#efe4113d08085f4e6f9687759810f807469e2253" + +deep-is@~0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" + +deep-strict-equal@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/deep-strict-equal/-/deep-strict-equal-0.2.0.tgz#4a078147a8ab57f6a0d4f5547243cd22f44eb4e4" + dependencies: + core-assert "^0.2.0" + +defined@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" + +del@^2.0.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/del/-/del-2.2.2.tgz#c12c981d067846c84bcaf862cff930d907ffd1a8" + dependencies: + globby "^5.0.0" + is-path-cwd "^1.0.0" + is-path-in-cwd "^1.0.0" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + rimraf "^2.2.8" + +delayed-stream@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-0.0.5.tgz#d4b1f43a93e8296dfe02694f4680bc37a313c73f" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + +detective@~4.3.1: + version "4.3.2" + resolved "https://registry.yarnpkg.com/detective/-/detective-4.3.2.tgz#77697e2e7947ac3fe7c8e26a6d6f115235afa91c" + dependencies: + acorn "^3.1.0" + defined "^1.0.0" + +doctrine@1.5.0, doctrine@^1.2.2: + version "1.5.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-1.5.0.tgz#379dce730f6166f76cefa4e6707a159b02c5a6fa" + dependencies: + esutils "^2.0.2" + isarray "^1.0.0" + +doiuse@^2.4.1: + version "2.5.0" + resolved "https://registry.yarnpkg.com/doiuse/-/doiuse-2.5.0.tgz#c7f156965d054bf4d699a4067af1cadbc7350b7c" + dependencies: + browserslist "^1.1.1" + caniuse-db "^1.0.30000187" + css-rule-stream "^1.1.0" + duplexer2 "0.0.2" + jsonfilter "^1.1.2" + ldjson-stream "^1.2.1" + lodash "^4.0.0" + multimatch "^2.0.0" + postcss "^5.0.8" + source-map "^0.4.2" + through2 "^0.6.3" + yargs "^3.5.4" + +dot-prop@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-3.0.0.tgz#1b708af094a49c9a0e7dbcad790aba539dac1177" + dependencies: + is-obj "^1.0.0" + +duplexer2@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/duplexer2/-/duplexer2-0.0.2.tgz#c614dcf67e2fb14995a91711e5a617e8a60a31db" + dependencies: + readable-stream "~1.1.9" + +duplexer2@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/duplexer2/-/duplexer2-0.1.4.tgz#8b12dab878c0d69e3e7891051662a32fc6bddcc1" + dependencies: + readable-stream "^2.0.2" + +duplexer@^0.1.1, duplexer@~0.1.1: + version "0.1.1" + resolved 
"https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.1.tgz#ace6ff808c1ce66b57d1ebf97977acb02334cfc1" + +duplexify@^3.2.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.5.0.tgz#1aa773002e1578457e9d9d4a50b0ccaaebcbd604" + dependencies: + end-of-stream "1.0.0" + inherits "^2.0.1" + readable-stream "^2.0.0" + stream-shift "^1.0.0" + +ecc-jsbn@~0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz#0fc73a9ed5f0d53c38193398523ef7e543777505" + dependencies: + jsbn "~0.1.0" + +electron-to-chromium@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.2.3.tgz#4b4d04d237c301f72e2d15c2137b2b79f9f5ab76" + +end-of-stream@1.0.0, end-of-stream@^1.0.0, end-of-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.0.0.tgz#d4596e702734a93e40e9af864319eabd99ff2f0e" + dependencies: + once "~1.3.0" + +enhance-visitors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/enhance-visitors/-/enhance-visitors-1.0.0.tgz#aa945d05da465672a1ebd38fee2ed3da8518e95a" + dependencies: + lodash "^4.13.1" + +error-ex@^1.2.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.0.tgz#e67b43f3e82c96ea3a584ffee0b9fc3325d802d9" + dependencies: + is-arrayish "^0.2.1" + +es5-ext@^0.10.7, es5-ext@^0.10.8, es5-ext@~0.10.11, es5-ext@~0.10.2, es5-ext@~0.10.5, es5-ext@~0.10.6, es5-ext@~0.10.7: + version "0.10.12" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.12.tgz#aa84641d4db76b62abba5e45fd805ecbab140047" + dependencies: + es6-iterator "2" + es6-symbol "~3.1" + +es6-iterator@2: + version "2.0.0" + resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.0.tgz#bd968567d61635e33c0b80727613c9cb4b096bac" + dependencies: + d "^0.1.1" + es5-ext "^0.10.7" + es6-symbol "3" + +es6-iterator@~0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-0.1.3.tgz#d6f58b8c4fc413c249b4baa19768f8e4d7c8944e" + dependencies: + d "~0.1.1" + es5-ext "~0.10.5" + es6-symbol "~2.0.1" + +es6-map@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/es6-map/-/es6-map-0.1.4.tgz#a34b147be224773a4d7da8072794cefa3632b897" + dependencies: + d "~0.1.1" + es5-ext "~0.10.11" + es6-iterator "2" + es6-set "~0.1.3" + es6-symbol "~3.1.0" + event-emitter "~0.3.4" + +es6-promise@^3.0.2, es6-promise@^3.1.2: + version "3.3.1" + resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-3.3.1.tgz#a08cdde84ccdbf34d027a1451bc91d4bcd28a613" + +es6-set@~0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.4.tgz#9516b6761c2964b92ff479456233a247dc707ce8" + dependencies: + d "~0.1.1" + es5-ext "~0.10.11" + es6-iterator "2" + es6-symbol "3" + event-emitter "~0.3.4" + +es6-symbol@3, es6-symbol@~3.1, es6-symbol@~3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.0.tgz#94481c655e7a7cad82eba832d97d5433496d7ffa" + dependencies: + d "~0.1.1" + es5-ext "~0.10.11" + +es6-symbol@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-2.0.1.tgz#761b5c67cfd4f1d18afb234f691d678682cb3bf3" + dependencies: + d "~0.1.1" + es5-ext "~0.10.5" + +es6-weak-map@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.1.tgz#0d2bbd8827eb5fb4ba8f97fbfea50d43db21ea81" + dependencies: + d "^0.1.1" + es5-ext "^0.10.8" + es6-iterator "2" + 
es6-symbol "3" + +es6-weak-map@~0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-0.1.4.tgz#706cef9e99aa236ba7766c239c8b9e286ea7d228" + dependencies: + d "~0.1.1" + es5-ext "~0.10.6" + es6-iterator "~0.1.3" + es6-symbol "~2.0.1" + +escape-string-regexp@^1.0.0, escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + +escope@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/escope/-/escope-3.6.0.tgz#e01975e812781a163a6dadfdd80398dc64c889c3" + dependencies: + es6-map "^0.1.3" + es6-weak-map "^2.0.1" + esrecurse "^4.1.0" + estraverse "^4.1.1" + +eslint-config-xo@^0.17.0: + version "0.17.0" + resolved "https://registry.yarnpkg.com/eslint-config-xo/-/eslint-config-xo-0.17.0.tgz#1e7d4a86bf49179805c4622e832a7b1beeb4e881" + +eslint-formatter-pretty@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/eslint-formatter-pretty/-/eslint-formatter-pretty-1.1.0.tgz#ab4d06da02fed8c13ae9f0dc540a433ef7ed6f5e" + dependencies: + ansi-escapes "^1.4.0" + chalk "^1.1.3" + log-symbols "^1.0.2" + plur "^2.1.2" + string-width "^2.0.0" + +eslint-import-resolver-node@^0.2.0: + version "0.2.3" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.2.3.tgz#5add8106e8c928db2cba232bcd9efa846e3da16c" + dependencies: + debug "^2.2.0" + object-assign "^4.0.1" + resolve "^1.1.6" + +eslint-module-utils@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.0.0.tgz#a6f8c21d901358759cdc35dbac1982ae1ee58bce" + dependencies: + debug "2.2.0" + pkg-dir "^1.0.0" + +eslint-plugin-ava@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-ava/-/eslint-plugin-ava-3.1.1.tgz#fdcf1ad9605867639ae0007d58100ee40a6de25d" + dependencies: + arrify "^1.0.1" + deep-strict-equal "^0.2.0" + enhance-visitors "^1.0.0" + espree "^3.1.3" + espurify "^1.5.0" + multimatch "^2.1.0" + pkg-up "^1.0.0" + req-all "^0.1.0" + +eslint-plugin-import@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.2.0.tgz#72ba306fad305d67c4816348a4699a4229ac8b4e" + dependencies: + builtin-modules "^1.1.1" + contains-path "^0.1.0" + debug "^2.2.0" + doctrine "1.5.0" + eslint-import-resolver-node "^0.2.0" + eslint-module-utils "^2.0.0" + has "^1.0.1" + lodash.cond "^4.3.0" + minimatch "^3.0.3" + pkg-up "^1.0.0" + +eslint-plugin-no-use-extend-native@^0.3.2: + version "0.3.12" + resolved "https://registry.yarnpkg.com/eslint-plugin-no-use-extend-native/-/eslint-plugin-no-use-extend-native-0.3.12.tgz#3ad9a00c2df23b5d7f7f6be91550985a4ab701ea" + dependencies: + is-get-set-prop "^1.0.0" + is-js-type "^2.0.0" + is-obj-prop "^1.0.0" + is-proto-prop "^1.0.0" + +eslint-plugin-promise@^3.0.0: + version "3.4.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-3.4.2.tgz#1be2793eafe2d18b5b123b8136c269f804fe7122" + +eslint-plugin-unicorn@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-unicorn/-/eslint-plugin-unicorn-1.0.0.tgz#b761ad233d34d164cda5c41217571609bd1ac161" + dependencies: + lodash.camelcase "^4.1.1" + lodash.kebabcase "^4.0.1" + lodash.snakecase "^4.0.1" + lodash.upperfirst "^4.2.0" + req-all "^0.1.0" + +eslint@^3.6.0: + version "3.16.1" + resolved 
"https://registry.yarnpkg.com/eslint/-/eslint-3.16.1.tgz#9bc31fc7341692cf772e80607508f67d711c5609" + dependencies: + babel-code-frame "^6.16.0" + chalk "^1.1.3" + concat-stream "^1.4.6" + debug "^2.1.1" + doctrine "^1.2.2" + escope "^3.6.0" + espree "^3.4.0" + estraverse "^4.2.0" + esutils "^2.0.2" + file-entry-cache "^2.0.0" + glob "^7.0.3" + globals "^9.14.0" + ignore "^3.2.0" + imurmurhash "^0.1.4" + inquirer "^0.12.0" + is-my-json-valid "^2.10.0" + is-resolvable "^1.0.0" + js-yaml "^3.5.1" + json-stable-stringify "^1.0.0" + levn "^0.3.0" + lodash "^4.0.0" + mkdirp "^0.5.0" + natural-compare "^1.4.0" + optionator "^0.8.2" + path-is-inside "^1.0.1" + pluralize "^1.2.1" + progress "^1.1.8" + require-uncached "^1.0.2" + shelljs "^0.7.5" + strip-bom "^3.0.0" + strip-json-comments "~2.0.1" + table "^3.7.8" + text-table "~0.2.0" + user-home "^2.0.0" + +espree@^3.1.3, espree@^3.4.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/espree/-/espree-3.4.0.tgz#41656fa5628e042878025ef467e78f125cb86e1d" + dependencies: + acorn "4.0.4" + acorn-jsx "^3.0.0" + +esprima@^3.1.1: + version "3.1.3" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-3.1.3.tgz#fdca51cee6133895e3c88d535ce49dbff62a4633" + +"esprima@~ 1.0.2", esprima@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-1.0.4.tgz#9f557e08fc3b4d26ece9dd34f8fbf476b62585ad" + +espurify@^1.5.0: + version "1.6.1" + resolved "https://registry.yarnpkg.com/espurify/-/espurify-1.6.1.tgz#a618c3b320071a4e9e7136c5d78717cdd07020da" + dependencies: + core-js "^2.0.0" + +esrecurse@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.1.0.tgz#4713b6536adf7f2ac4f327d559e7756bff648220" + dependencies: + estraverse "~4.1.0" + object-assign "^4.0.1" + +estraverse@^4.1.1, estraverse@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" + +estraverse@~4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.1.1.tgz#f6caca728933a850ef90661d0e17982ba47111a2" + +esutils@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" + +event-emitter@~0.3.4: + version "0.3.4" + resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.4.tgz#8d63ddfb4cfe1fae3b32ca265c4c720222080bb5" + dependencies: + d "~0.1.1" + es5-ext "~0.10.7" + +eventemitter2@~0.4.13: + version "0.4.14" + resolved "https://registry.yarnpkg.com/eventemitter2/-/eventemitter2-0.4.14.tgz#8f61b75cde012b2e9eb284d4545583b5643b61ab" + +execa@^0.5.0: + version "0.5.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-0.5.1.tgz#de3fb85cb8d6e91c85bcbceb164581785cb57b36" + dependencies: + cross-spawn "^4.0.0" + get-stream "^2.2.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + +execall@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/execall/-/execall-1.0.0.tgz#73d0904e395b3cab0658b08d09ec25307f29bb73" + dependencies: + clone-regexp "^1.0.0" + +exit-hook@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/exit-hook/-/exit-hook-1.1.1.tgz#f05ca233b48c05d54fff07765df8507e95c02ff8" + +exit@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + +expand-brackets@^0.1.4: + version "0.1.5" + resolved 
"https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-0.1.5.tgz#df07284e342a807cd733ac5af72411e581d1177b" + dependencies: + is-posix-bracket "^0.1.0" + +expand-range@^1.8.1: + version "1.8.2" + resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337" + dependencies: + fill-range "^2.1.0" + +extend@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.0.tgz#5a474353b9f3353ddd8176dfd37b91c83a46f1d4" + +extglob@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-0.3.2.tgz#2e18ff3d2f49ab2765cec9023f011daa8d8349a1" + dependencies: + is-extglob "^1.0.0" + +extsprintf@1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.0.2.tgz#e1080e0658e300b06294990cc70e1502235fd550" + +fast-levenshtein@~2.0.4: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + +figures@^1.0.1, figures@^1.3.2, figures@^1.3.5: + version "1.7.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-1.7.0.tgz#cbe1e3affcf1cd44b80cadfed28dc793a9701d2e" + dependencies: + escape-string-regexp "^1.0.5" + object-assign "^4.1.0" + +file-entry-cache@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-2.0.0.tgz#c392990c3e684783d838b8c84a45d8a048458361" + dependencies: + flat-cache "^1.2.1" + object-assign "^4.0.1" + +file-sync-cmp@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/file-sync-cmp/-/file-sync-cmp-0.1.1.tgz#a5e7a8ffbfa493b43b923bbd4ca89a53b63b612b" + +filename-regex@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.0.tgz#996e3e80479b98b9897f15a8a58b3d084e926775" + +filesize@~3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/filesize/-/filesize-3.2.1.tgz#a06f1c5497ed6358057c415e53403f764c1fb5e6" + +fill-range@^2.1.0: + version "2.2.3" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.3.tgz#50b77dfd7e469bc7492470963699fe7a8485a723" + dependencies: + is-number "^2.1.0" + isobject "^2.0.0" + randomatic "^1.1.3" + repeat-element "^1.1.2" + repeat-string "^1.5.2" + +filled-array@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/filled-array/-/filled-array-1.1.0.tgz#c3c4f6c663b923459a9aa29912d2d031f1507f84" + +find-up@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" + dependencies: + path-exists "^2.0.0" + pinkie-promise "^2.0.0" + +find-up@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + dependencies: + locate-path "^2.0.0" + +findup-sync@~0.1.0, findup-sync@~0.1.2: + version "0.1.3" + resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-0.1.3.tgz#7f3e7a97b82392c653bf06589bd85190e93c3683" + dependencies: + glob "~3.2.9" + lodash "~2.4.1" + +flat-cache@^1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.2.2.tgz#fa86714e72c21db88601761ecf2f555d1abc6b96" + dependencies: + circular-json "^0.3.1" + del "^2.0.2" + graceful-fs "^4.1.2" + write "^0.2.1" + +flatten@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/flatten/-/flatten-1.0.2.tgz#dae46a9d78fbe25292258cc1e780a41d95c03782" + +for-in@^0.1.5: + version "0.1.6" + resolved 
"https://registry.yarnpkg.com/for-in/-/for-in-0.1.6.tgz#c9f96e89bfad18a545af5ec3ed352a1d9e5b4dc8" + +for-own@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/for-own/-/for-own-0.1.4.tgz#0149b41a39088c7515f51ebe1c1386d45f935072" + dependencies: + for-in "^0.1.5" + +forever-agent@~0.5.0: + version "0.5.2" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.5.2.tgz#6d0e09c4921f94a27f63d3b49c5feff1ea4c5130" + +forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + +form-data@~0.1.0: + version "0.1.4" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-0.1.4.tgz#91abd788aba9702b1aabfa8bc01031a2ac9e3b12" + dependencies: + async "~0.9.0" + combined-stream "~0.0.4" + mime "~1.2.11" + +form-data@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-0.2.0.tgz#26f8bc26da6440e299cbdcfb69035c4f77a6e466" + dependencies: + async "~0.9.0" + combined-stream "~0.0.4" + mime-types "~2.0.3" + +form-data@~2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.1.2.tgz#89c3534008b97eada4cbb157d58f6f5df025eae4" + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.5" + mime-types "^2.1.12" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + +fstream-ignore@~1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/fstream-ignore/-/fstream-ignore-1.0.5.tgz#9c31dae34767018fe1d249b24dada67d092da105" + dependencies: + fstream "^1.0.0" + inherits "2" + minimatch "^3.0.0" + +fstream@^1.0.0, fstream@~1.0.2: + version "1.0.10" + resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.10.tgz#604e8a92fe26ffd9f6fae30399d4984e1ab22822" + dependencies: + graceful-fs "^4.1.2" + inherits "~2.0.0" + mkdirp ">=0.5 0" + rimraf "2" + +function-bind@^1.0.2: + version "1.1.0" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.0.tgz#16176714c801798e4e8f2cf7f7529467bb4a5771" + +gather-stream@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/gather-stream/-/gather-stream-1.0.0.tgz#b33994af457a8115700d410f317733cbe7a0904b" + +generate-function@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/generate-function/-/generate-function-2.0.0.tgz#6858fe7c0969b7d4e9093337647ac79f60dfbe74" + +generate-object-property@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/generate-object-property/-/generate-object-property-1.2.0.tgz#9c0e1c40308ce804f4783618b937fa88f99d50d0" + dependencies: + is-property "^1.0.0" + +get-caller-file@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.2.tgz#f702e63127e7e231c160a80c1554acb70d5047e5" + +get-set-props@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-set-props/-/get-set-props-0.1.0.tgz#998475c178445686d0b32246da5df8dbcfbe8ea3" + +get-stdin@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" + +get-stdin@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-5.0.1.tgz#122e161591e21ff4c52530305693f20e6393a398" + +get-stream@^2.2.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-2.3.1.tgz#5f38f93f346009666ee0150a054167f91bdd95de" + dependencies: + 
object-assign "^4.0.1" + pinkie-promise "^2.0.0" + +getobject@~0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/getobject/-/getobject-0.1.0.tgz#047a449789fa160d018f5486ed91320b6ec7885c" + +getpass@^0.1.1: + version "0.1.6" + resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.6.tgz#283ffd9fc1256840875311c1b60e8c40187110e6" + dependencies: + assert-plus "^1.0.0" + +glob-base@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4" + dependencies: + glob-parent "^2.0.0" + is-glob "^2.0.0" + +glob-parent@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-2.0.0.tgz#81383d72db054fcccf5336daa902f182f6edbb28" + dependencies: + is-glob "^2.0.0" + +glob@^7.0.0, glob@^7.0.3, glob@^7.0.5: + version "7.1.1" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.1.tgz#805211df04faaf1c63a3600306cdf5ade50b2ec8" + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.2" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@~3.1.21: + version "3.1.21" + resolved "https://registry.yarnpkg.com/glob/-/glob-3.1.21.tgz#d29e0a055dea5138f4d07ed40e8982e83c2066cd" + dependencies: + graceful-fs "~1.2.0" + inherits "1" + minimatch "~0.2.11" + +glob@~3.2.9: + version "3.2.11" + resolved "https://registry.yarnpkg.com/glob/-/glob-3.2.11.tgz#4a973f635b9190f715d10987d5c00fd2815ebe3d" + dependencies: + inherits "2" + minimatch "0.3" + +glob@~4.0.2: + version "4.0.6" + resolved "https://registry.yarnpkg.com/glob/-/glob-4.0.6.tgz#695c50bdd4e2fb5c5d370b091f388d3707e291a7" + dependencies: + graceful-fs "^3.0.2" + inherits "2" + minimatch "^1.0.0" + once "^1.3.0" + +globals@^9.14.0: + version "9.16.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-9.16.0.tgz#63e903658171ec2d9f51b1d31de5e2b8dc01fb80" + +globby@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-5.0.0.tgz#ebd84667ca0dbb330b99bcfc68eac2bc54370e0d" + dependencies: + array-union "^1.0.1" + arrify "^1.0.0" + glob "^7.0.3" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + +globby@^6.0.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" + dependencies: + array-union "^1.0.1" + glob "^7.0.3" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + +globjoin@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/globjoin/-/globjoin-0.1.4.tgz#2f4494ac8919e3767c5cbb691e9f463324285d43" + +got@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/got/-/got-0.3.0.tgz#888ec66ca4bc735ab089dbe959496d0f79485493" + dependencies: + object-assign "^0.3.0" + +got@^3.2.0: + version "3.3.1" + resolved "https://registry.yarnpkg.com/got/-/got-3.3.1.tgz#e5d0ed4af55fc3eef4d56007769d98192bcb2eca" + dependencies: + duplexify "^3.2.0" + infinity-agent "^2.0.0" + is-redirect "^1.0.0" + is-stream "^1.0.0" + lowercase-keys "^1.0.0" + nested-error-stacks "^1.0.0" + object-assign "^3.0.0" + prepend-http "^1.0.0" + read-all-stream "^3.0.0" + timed-out "^2.0.0" + +got@^5.0.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/got/-/got-5.7.1.tgz#5f81635a61e4a6589f180569ea4e381680a51f35" + dependencies: + create-error-class "^3.0.1" + duplexer2 "^0.1.4" + is-redirect "^1.0.0" + is-retry-allowed "^1.0.0" + is-stream "^1.0.0" + lowercase-keys "^1.0.0" + node-status-codes "^1.0.0" + object-assign "^4.0.1" + parse-json "^2.1.0" + 
pinkie-promise "^2.0.0" + read-all-stream "^3.0.0" + readable-stream "^2.0.5" + timed-out "^3.0.0" + unzip-response "^1.0.2" + url-parse-lax "^1.0.0" + +graceful-fs@^3.0.1, graceful-fs@^3.0.2, graceful-fs@~3.0.0, graceful-fs@~3.0.1: + version "3.0.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-3.0.11.tgz#7613c778a1afea62f25c630a086d7f3acbbdd818" + dependencies: + natives "^1.1.0" + +graceful-fs@^4.1.11, graceful-fs@^4.1.2: + version "4.1.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" + +graceful-fs@~1.1: + version "1.1.14" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-1.1.14.tgz#07078db5f6377f6321fceaaedf497de124dc9465" + +graceful-fs@~1.2.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-1.2.3.tgz#15a4806a57547cb2d2dbf27f42e89a8c3451b364" + +graceful-fs@~2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-2.0.3.tgz#7cd2cdb228a4a3f36e95efa6cc142de7d1a136d0" + +"graceful-readlink@>= 1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/graceful-readlink/-/graceful-readlink-1.0.1.tgz#4cafad76bc62f02fa039b2f94e9a3dd3a391a725" + +grunt-bower-concat@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/grunt-bower-concat/-/grunt-bower-concat-1.0.0.tgz#f430c7b718704c6815215c6ca94d2fd5dd4a7b5b" + dependencies: + async "~1.5.2" + bower "~1.7.7" + detective "~4.3.1" + filesize "~3.2.1" + lodash "~4.3.0" + underscore.string "~3.2.3" + +grunt-bower-task@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/grunt-bower-task/-/grunt-bower-task-0.4.0.tgz#daea0a55682a79a8c79db895b79be6f3ecb65817" + dependencies: + async "~0.1.22" + bower "~1.3.0" + colors "~0.6.0-1" + lodash "~0.10.0" + rimraf "~2.0.2" + wrench "~1.4.3" + +grunt-cli@^0.1.13: + version "0.1.13" + resolved "https://registry.yarnpkg.com/grunt-cli/-/grunt-cli-0.1.13.tgz#e9ebc4047631f5012d922770c39378133cad10f4" + dependencies: + findup-sync "~0.1.0" + nopt "~1.0.10" + resolve "~0.3.1" + +grunt-contrib-clean@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/grunt-contrib-clean/-/grunt-contrib-clean-1.0.0.tgz#6b2ed94117e2c7ffe32ee04578c96fe4625a9b6d" + dependencies: + async "^1.5.2" + rimraf "^2.5.1" + +grunt-contrib-copy@^0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/grunt-contrib-copy/-/grunt-contrib-copy-0.8.2.tgz#df31c90ffcc409bc9fafe44ec0dd1e4259916fea" + dependencies: + chalk "^1.1.1" + file-sync-cmp "^0.1.0" + +grunt-contrib-cssmin@^0.14.0: + version "0.14.0" + resolved "https://registry.yarnpkg.com/grunt-contrib-cssmin/-/grunt-contrib-cssmin-0.14.0.tgz#88b0a92536969bb566281c5c61ec5062d833f3b7" + dependencies: + chalk "^1.0.0" + clean-css "~3.4.2" + maxmin "^1.1.0" + +grunt-contrib-uglify@^0.11.1: + version "0.11.1" + resolved "https://registry.yarnpkg.com/grunt-contrib-uglify/-/grunt-contrib-uglify-0.11.1.tgz#5e22a2f676cd11d871fc2a0f08aa9b2973045325" + dependencies: + chalk "^1.0.0" + lodash "^4.0.1" + maxmin "^2.0.0" + uglify-js "~2.6.0" + uri-path "^1.0.0" + +grunt-legacy-log-utils@~0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/grunt-legacy-log-utils/-/grunt-legacy-log-utils-0.1.1.tgz#c0706b9dd9064e116f36f23fe4e6b048672c0f7e" + dependencies: + colors "~0.6.2" + lodash "~2.4.1" + underscore.string "~2.3.3" + +grunt-legacy-log@~0.1.0: + version "0.1.3" + resolved 
"https://registry.yarnpkg.com/grunt-legacy-log/-/grunt-legacy-log-0.1.3.tgz#ec29426e803021af59029f87d2f9cd7335a05531" + dependencies: + colors "~0.6.2" + grunt-legacy-log-utils "~0.1.1" + hooker "~0.2.3" + lodash "~2.4.1" + underscore.string "~2.3.3" + +grunt-legacy-util@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/grunt-legacy-util/-/grunt-legacy-util-0.2.0.tgz#93324884dbf7e37a9ff7c026dff451d94a9e554b" + dependencies: + async "~0.1.22" + exit "~0.1.1" + getobject "~0.1.0" + hooker "~0.2.3" + lodash "~0.9.2" + underscore.string "~2.2.1" + which "~1.0.5" + +grunt@^0.4.5: + version "0.4.5" + resolved "https://registry.yarnpkg.com/grunt/-/grunt-0.4.5.tgz#56937cd5194324adff6d207631832a9d6ba4e7f0" + dependencies: + async "~0.1.22" + coffee-script "~1.3.3" + colors "~0.6.2" + dateformat "1.0.2-1.2.3" + eventemitter2 "~0.4.13" + exit "~0.1.1" + findup-sync "~0.1.2" + getobject "~0.1.0" + glob "~3.1.21" + grunt-legacy-log "~0.1.0" + grunt-legacy-util "~0.2.0" + hooker "~0.2.3" + iconv-lite "~0.2.11" + js-yaml "~2.0.5" + lodash "~0.9.2" + minimatch "~0.2.12" + nopt "~1.0.10" + rimraf "~2.2.8" + underscore.string "~2.2.1" + which "~1.0.5" + +gzip-size@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-1.0.0.tgz#66cf8b101047227b95bace6ea1da0c177ed5c22f" + dependencies: + browserify-zlib "^0.1.4" + concat-stream "^1.4.1" + +gzip-size@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-3.0.0.tgz#546188e9bdc337f673772f81660464b389dce520" + dependencies: + duplexer "^0.1.1" + +handlebars@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-2.0.0.tgz#6e9d7f8514a3467fa5e9f82cc158ecfc1d5ac76f" + dependencies: + optimist "~0.3" + optionalDependencies: + uglify-js "~2.3" + +har-validator@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-2.0.6.tgz#cdcbc08188265ad119b6a5a7c8ab70eecfb5d27d" + dependencies: + chalk "^1.1.1" + commander "^2.9.0" + is-my-json-valid "^2.12.4" + pinkie-promise "^2.0.0" + +has-ansi@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-0.1.0.tgz#84f265aae8c0e6a88a12d7022894b7568894c62e" + dependencies: + ansi-regex "^0.2.0" + +has-ansi@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + dependencies: + ansi-regex "^2.0.0" + +has-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" + +has-flag@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51" + +has@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.1.tgz#8461733f538b0837c9361e39a9ab9e9704dc2f28" + dependencies: + function-bind "^1.0.2" + +hasbin@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/hasbin/-/hasbin-1.2.3.tgz#78c5926893c80215c2b568ae1fd3fcab7a2696b0" + dependencies: + async "~1.5" + +hawk@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/hawk/-/hawk-1.1.1.tgz#87cd491f9b46e4e2aeaca335416766885d2d1ed9" + dependencies: + boom "0.4.x" + cryptiles "0.2.x" + hoek "0.9.x" + sntp "0.2.x" + +hawk@~3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4" + dependencies: + boom "2.x.x" + cryptiles "2.x.x" + hoek "2.x.x" + sntp 
"1.x.x" + +hoek@0.9.x: + version "0.9.1" + resolved "https://registry.yarnpkg.com/hoek/-/hoek-0.9.1.tgz#3d322462badf07716ea7eb85baf88079cddce505" + +hoek@2.x.x: + version "2.16.3" + resolved "https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed" + +hooker@~0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/hooker/-/hooker-0.2.3.tgz#b834f723cc4a242aa65963459df6d984c5d3d959" + +hosted-git-info@^2.1.4: + version "2.2.0" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.2.0.tgz#7a0d097863d886c0fabbdcd37bf1758d8becf8a5" + +html-tags@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-1.1.1.tgz#869f43859f12d9bdc3892419e494a628aa1b204e" + +http-signature@~0.10.0: + version "0.10.1" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-0.10.1.tgz#4fbdac132559aa8323121e540779c0a012b27e66" + dependencies: + asn1 "0.1.11" + assert-plus "^0.1.5" + ctype "0.5.3" + +http-signature@~1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.1.1.tgz#df72e267066cd0ac67fb76adf8e134a8fbcf91bf" + dependencies: + assert-plus "^0.2.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +iconv-lite@~0.2.11: + version "0.2.11" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.2.11.tgz#1ce60a3a57864a292d1321ff4609ca4bb965adc8" + +ignore@^3.2.0: + version "3.2.4" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.2.4.tgz#4055e03596729a8fabe45a43c100ad5ed815c4e8" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + +indent-string@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" + dependencies: + repeating "^2.0.0" + +indexes-of@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" + +infinity-agent@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/infinity-agent/-/infinity-agent-2.0.3.tgz#45e0e2ff7a9eb030b27d62b74b3744b7a7ac4216" + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-1.0.2.tgz#ca4309dadee6b54cc0b8d247e8d7c7a0975bdc9b" + +inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.1: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + +ini@1.x.x, ini@^1.2.0, ini@^1.3.4, ini@~1.3.0: + version "1.3.4" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e" + +inquirer@0.7.1: + version "0.7.1" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-0.7.1.tgz#b8acf140165bd581862ed1198fb6d26430091fac" + dependencies: + chalk "^0.5.0" + cli-color "~0.3.2" + figures "^1.3.2" + lodash "~2.4.1" + mute-stream "0.0.4" + readline2 "~0.1.0" + rx "^2.2.27" + through "~2.3.4" + +inquirer@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-1.0.3.tgz#ebe3a0948571bcc46ccccbe2f9bcec251e984bd0" + dependencies: + ansi-escapes "^1.1.0" + chalk "^1.0.0" + cli-cursor "^1.0.1" + cli-width "^2.0.0" + figures "^1.3.5" + 
lodash "^4.3.0" + mute-stream "0.0.6" + pinkie-promise "^2.0.0" + run-async "^2.2.0" + rx "^4.1.0" + string-width "^1.0.1" + strip-ansi "^3.0.0" + through "^2.3.6" + +inquirer@^0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-0.12.0.tgz#1ef2bfd63504df0bc75785fff8c2c41df12f077e" + dependencies: + ansi-escapes "^1.1.0" + ansi-regex "^2.0.0" + chalk "^1.0.0" + cli-cursor "^1.0.1" + cli-width "^2.0.0" + figures "^1.3.5" + lodash "^4.3.0" + readline2 "^1.0.1" + run-async "^0.1.0" + rx-lite "^3.1.2" + string-width "^1.0.1" + strip-ansi "^3.0.0" + through "^2.3.6" + +inquirer@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-0.6.0.tgz#614d7bb3e48f9e6a8028e94a0c38f23ef29823d3" + dependencies: + chalk "^0.5.0" + cli-color "~0.3.2" + lodash "~2.4.1" + mute-stream "0.0.4" + readline2 "~0.1.0" + rx "^2.2.27" + through "~2.3.4" + +insight@0.4.3: + version "0.4.3" + resolved "https://registry.yarnpkg.com/insight/-/insight-0.4.3.tgz#76d653c5c0d8048b03cdba6385a6948f74614af0" + dependencies: + async "^0.9.0" + chalk "^0.5.1" + configstore "^0.3.1" + inquirer "^0.6.0" + lodash.debounce "^2.4.1" + object-assign "^1.0.0" + os-name "^1.0.0" + request "^2.40.0" + tough-cookie "^0.12.1" + +interpret@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.0.1.tgz#d579fb7f693b858004947af39fa0db49f795602c" + +intersect@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/intersect/-/intersect-0.0.3.tgz#c1a4a5e5eac6ede4af7504cc07e0ada7bc9f4920" + +invert-kv@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" + +irregular-plurals@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/irregular-plurals/-/irregular-plurals-1.2.0.tgz#38f299834ba8c00c30be9c554e137269752ff3ac" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + +is-buffer@^1.0.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.4.tgz#cfc86ccd5dc5a52fa80489111c6920c457e2d98b" + +is-builtin-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-1.0.0.tgz#540572d34f7ac3119f8f76c30cbc1b1e037affbe" + dependencies: + builtin-modules "^1.0.0" + +is-dotfile@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.2.tgz#2c132383f39199f8edc268ca01b9b007d205cc4d" + +is-equal-shallow@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz#2238098fc221de0bcfa5d9eac4c45d638aa1c534" + dependencies: + is-primitive "^2.0.0" + +is-error@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/is-error/-/is-error-2.2.1.tgz#684a96d84076577c98f4cdb40c6d26a5123bf19c" + +is-extendable@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + +is-extglob@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" + +is-finite@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + +is-get-set-prop@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-get-set-prop/-/is-get-set-prop-1.0.0.tgz#2731877e4d78a6a69edcce6bb9d68b0779e76312" + dependencies: + get-set-props "^0.1.0" + lowercase-keys "^1.0.0" + +is-glob@^2.0.0, is-glob@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863" + dependencies: + is-extglob "^1.0.0" + +is-js-type@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-js-type/-/is-js-type-2.0.0.tgz#73617006d659b4eb4729bba747d28782df0f7e22" + dependencies: + js-types "^1.0.0" + +is-my-json-valid@^2.10.0, is-my-json-valid@^2.12.4: + version "2.15.0" + resolved "https://registry.yarnpkg.com/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz#936edda3ca3c211fd98f3b2d3e08da43f7b2915b" + dependencies: + generate-function "^2.0.0" + generate-object-property "^1.1.0" + jsonpointer "^4.0.0" + xtend "^4.0.0" + +is-npm@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-1.0.0.tgz#f2fb63a65e4905b406c86072765a1a4dc793b9f4" + +is-number@^2.0.2, is-number@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" + dependencies: + kind-of "^3.0.2" + +is-obj-prop@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-obj-prop/-/is-obj-prop-1.0.0.tgz#b34de79c450b8d7c73ab2cdf67dc875adb85f80e" + dependencies: + lowercase-keys "^1.0.0" + obj-props "^1.0.0" + +is-obj@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" + +is-path-cwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" + +is-path-in-cwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-1.0.0.tgz#6477582b8214d602346094567003be8a9eac04dc" + dependencies: + is-path-inside "^1.0.0" + +is-path-inside@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.0.tgz#fc06e5a1683fbda13de667aff717bbc10a48f37f" + dependencies: + path-is-inside "^1.0.1" + +is-plain-obj@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" + +is-posix-bracket@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4" + +is-primitive@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575" + +is-promise@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" + +is-property@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-property/-/is-property-1.0.2.tgz#57fe1c4e48474edd65b09911f26b1cd4095dda84" + +is-proto-prop@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-proto-prop/-/is-proto-prop-1.0.0.tgz#b3951f95c089924fb5d4fcda6542ab3e83e2b220" + dependencies: + lowercase-keys "^1.0.0" + proto-props "^0.2.0" + +is-redirect@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24" + +is-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" + +is-resolvable@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.0.0.tgz#8df57c61ea2e3c501408d100fb013cf8d6e0cc62" + dependencies: + tryit "^1.0.1" + +is-retry-allowed@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34" + +is-root@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-root/-/is-root-1.0.0.tgz#07b6c233bc394cd9d02ba15c966bd6660d6342d5" + +is-stream@^1.0.0, is-stream@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + +is-supported-regexp-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-supported-regexp-flag/-/is-supported-regexp-flag-1.0.0.tgz#8b520c85fae7a253382d4b02652e045576e13bb8" + +is-typedarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + +is-utf8@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" + +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + +isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + +isexe@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-1.1.2.tgz#36f3e22e60750920f5e7241a476a8c6a42275ad0" + +isobject@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + dependencies: + isarray "1.0.0" + +isstream@~0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + +jodid25519@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/jodid25519/-/jodid25519-1.0.2.tgz#06d4912255093419477d425633606e0e90782967" + dependencies: + jsbn "~0.1.0" + +js-base64@^2.1.9: + version "2.1.9" + resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.1.9.tgz#f0e80ae039a4bd654b5f281fc93f04a914a7fcce" + +js-tokens@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.1.tgz#08e9f132484a2c45a30907e9dc4d5567b7f114d7" + +js-types@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/js-types/-/js-types-1.0.0.tgz#d242e6494ed572ad3c92809fc8bed7f7687cbf03" + +js-yaml@^3.1.0, js-yaml@^3.4.3, js-yaml@^3.5.1, js-yaml@^3.5.3: + version "3.8.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.8.1.tgz#782ba50200be7b9e5a8537001b7804db3ad02628" + dependencies: + argparse "^1.0.7" + esprima "^3.1.1" + +js-yaml@~2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-2.0.5.tgz#a25ae6509999e97df278c6719da11bd0687743a8" + 
dependencies: + argparse "~ 0.1.11" + esprima "~ 1.0.2" + +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + +json-schema@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" + +json-stable-stringify@^1.0.0, json-stable-stringify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af" + dependencies: + jsonify "~0.0.0" + +json-stringify-safe@~5.0.0, json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + +jsonfilter@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/jsonfilter/-/jsonfilter-1.1.2.tgz#21ef7cedc75193813c75932e96a98be205ba5a11" + dependencies: + JSONStream "^0.8.4" + minimist "^1.1.0" + stream-combiner "^0.2.1" + through2 "^0.6.3" + +jsonify@~0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" + +jsonparse@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-0.0.5.tgz#330542ad3f0a654665b778f3eb2d9a9fa507ac64" + +jsonpointer@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-4.0.1.tgz#4fd92cb34e0e9db3c89c8622ecf51f9b978c6cb9" + +jsprim@^1.2.2: + version "1.3.1" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.3.1.tgz#2a7256f70412a29ee3670aaca625994c4dcff252" + dependencies: + extsprintf "1.0.2" + json-schema "0.2.3" + verror "1.3.6" + +junk@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/junk/-/junk-1.0.3.tgz#87be63488649cbdca6f53ab39bec9ccd2347f592" + +kind-of@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.1.0.tgz#475d698a5e49ff5e53d14e3e732429dc8bf4cf47" + dependencies: + is-buffer "^1.0.2" + +known-css-properties@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/known-css-properties/-/known-css-properties-0.0.6.tgz#71a0b8fde1b6e3431c471efbc3d9733faebbcfbf" + +latest-version@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-0.2.0.tgz#adaf898d5f22380d3f9c45386efdff0a1b5b7501" + dependencies: + package-json "^0.2.0" + +latest-version@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-1.0.1.tgz#72cfc46e3e8d1be651e1ebb54ea9f6ea96f374bb" + dependencies: + package-json "^1.0.0" + +latest-version@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-2.0.0.tgz#56f8d6139620847b8017f8f1f4d78e211324168b" + dependencies: + package-json "^2.0.0" + +lazy-cache@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" + +lazy-req@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/lazy-req/-/lazy-req-1.1.0.tgz#bdaebead30f8d824039ce0ce149d4daa07ba1fac" + +lcid@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" + dependencies: + invert-kv "^1.0.0" + +ldjson-stream@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ldjson-stream/-/ldjson-stream-1.2.1.tgz#91beceda5ac4ed2b17e649fb777e7abfa0189c2b" + 
dependencies: + split2 "^0.2.1" + through2 "^0.6.1" + +levn@^0.3.0, levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +load-grunt-tasks@^3.3.0: + version "3.5.2" + resolved "https://registry.yarnpkg.com/load-grunt-tasks/-/load-grunt-tasks-3.5.2.tgz#0728561180fd20ff8a6927505852fc58aaea0c88" + dependencies: + arrify "^1.0.0" + multimatch "^2.0.0" + pkg-up "^1.0.0" + resolve-pkg "^0.1.0" + +load-json-file@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + pinkie-promise "^2.0.0" + strip-bom "^2.0.0" + +load-json-file@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8" + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + strip-bom "^3.0.0" + +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +lockfile@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/lockfile/-/lockfile-1.0.3.tgz#2638fc39a0331e9cac1a04b71799931c9c50df79" + +lodash._isnative@~2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash._isnative/-/lodash._isnative-2.4.1.tgz#3ea6404b784a7be836c7b57580e1cdf79b14832c" + +lodash._objecttypes@~2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash._objecttypes/-/lodash._objecttypes-2.4.1.tgz#7c0b7f69d98a1f76529f890b0cdb1b4dfec11c11" + +lodash.assign@^4.0.3, lodash.assign@^4.0.6: + version "4.2.0" + resolved "https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7" + +lodash.camelcase@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" + +lodash.clonedeep@^4.3.0, lodash.clonedeep@^4.3.1: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" + +lodash.cond@^4.3.0: + version "4.5.2" + resolved "https://registry.yarnpkg.com/lodash.cond/-/lodash.cond-4.5.2.tgz#f471a1da486be60f6ab955d17115523dd1d255d5" + +lodash.debounce@^2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-2.4.1.tgz#d8cead246ec4b926e8b85678fc396bfeba8cc6fc" + dependencies: + lodash.isfunction "~2.4.1" + lodash.isobject "~2.4.1" + lodash.now "~2.4.1" + +lodash.defaults@^4.0.1: + version "4.2.0" + resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c" + +lodash.defaultsdeep@^4.3.1: + version "4.6.0" + resolved "https://registry.yarnpkg.com/lodash.defaultsdeep/-/lodash.defaultsdeep-4.6.0.tgz#bec1024f85b1bd96cbea405b23c14ad6443a6f81" + +lodash.isfunction@~2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash.isfunction/-/lodash.isfunction-2.4.1.tgz#2cfd575c73e498ab57e319b77fa02adef13a94d1" + +lodash.isobject@~2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash.isobject/-/lodash.isobject-2.4.1.tgz#5a2e47fe69953f1ee631a7eba1fe64d2d06558f5" + dependencies: + 
lodash._objecttypes "~2.4.1" + +lodash.kebabcase@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36" + +lodash.mergewith@^4.3.1: + version "4.6.0" + resolved "https://registry.yarnpkg.com/lodash.mergewith/-/lodash.mergewith-4.6.0.tgz#150cf0a16791f5903b8891eab154609274bdea55" + +lodash.now@~2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/lodash.now/-/lodash.now-2.4.1.tgz#6872156500525185faf96785bb7fe7fe15b562c6" + dependencies: + lodash._isnative "~2.4.1" + +lodash.snakecase@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz#39d714a35357147837aefd64b5dcbb16becd8f8d" + +lodash.upperfirst@^4.2.0: + version "4.3.1" + resolved "https://registry.yarnpkg.com/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz#1365edf431480481ef0d1c68957a5ed99d49f7ce" + +lodash@^4.0.0, lodash@^4.1.0, lodash@^4.13.1, lodash@^4.17.4, lodash@^4.3.0: + version "4.17.4" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" + +lodash@^4.0.1, lodash@~4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.3.0.tgz#efd9c4a6ec53f3b05412429915c3e4824e4d25a4" + +lodash@~0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-0.10.0.tgz#5254bbc2c46c827f535a27d631fd4f2bff374ce7" + +lodash@~0.9.2: + version "0.9.2" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-0.9.2.tgz#8f3499c5245d346d682e5b0d3b40767e09f1a92c" + +lodash@~2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-2.4.2.tgz#fadd834b9683073da179b3eae6d9c0d15053f73e" + +log-symbols@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-1.0.2.tgz#376ff7b58ea3086a0f09facc74617eca501e1a18" + dependencies: + chalk "^1.0.0" + +longest@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" + +loud-rejection@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" + dependencies: + currently-unhandled "^0.4.1" + signal-exit "^3.0.0" + +lowercase-keys@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.0.tgz#4e3366b39e7f5457e35f1324bdf6f88d0bfc7306" + +lru-cache@2: + version "2.7.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.7.3.tgz#6d4524e8b955f95d4f5b58851ce21dd72fb4e952" + +lru-cache@^4.0.0, lru-cache@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.0.2.tgz#1d17679c069cda5d040991a09dbc2c0db377e55e" + dependencies: + pseudomap "^1.0.1" + yallist "^2.0.0" + +lru-cache@~2.3.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.3.1.tgz#b3adf6b3d856e954e2c390e6cef22081245a53d6" + +lru-cache@~2.5.0: + version "2.5.2" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.5.2.tgz#1fddad938aae1263ce138680be1b3f591c0ab41c" + +lru-queue@0.1: + version "0.1.0" + resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" + dependencies: + es5-ext "~0.10.2" + +map-obj@^1.0.0, map-obj@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" + +maxmin@^1.1.0: + version 
"1.1.0" + resolved "https://registry.yarnpkg.com/maxmin/-/maxmin-1.1.0.tgz#71365e84a99dd8f8b3f7d5fde2f00d1e7f73be61" + dependencies: + chalk "^1.0.0" + figures "^1.0.1" + gzip-size "^1.0.0" + pretty-bytes "^1.0.0" + +maxmin@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/maxmin/-/maxmin-2.1.0.tgz#4d3b220903d95eee7eb7ac7fa864e72dc09a3166" + dependencies: + chalk "^1.0.0" + figures "^1.0.1" + gzip-size "^3.0.0" + pretty-bytes "^3.0.0" + +memoizee@~0.3.8: + version "0.3.10" + resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.3.10.tgz#4eca0d8aed39ec9d017f4c5c2f2f6432f42e5c8f" + dependencies: + d "~0.1.1" + es5-ext "~0.10.11" + es6-weak-map "~0.1.4" + event-emitter "~0.3.4" + lru-queue "0.1" + next-tick "~0.2.2" + timers-ext "0.1" + +meow@^3.1.0, meow@^3.3.0, meow@^3.4.2: + version "3.7.0" + resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" + dependencies: + camelcase-keys "^2.0.0" + decamelize "^1.1.2" + loud-rejection "^1.0.0" + map-obj "^1.0.1" + minimist "^1.1.3" + normalize-package-data "^2.3.4" + object-assign "^4.0.1" + read-pkg-up "^1.0.1" + redent "^1.0.0" + trim-newlines "^1.0.0" + +micromatch@^2.3.11: + version "2.3.11" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565" + dependencies: + arr-diff "^2.0.0" + array-unique "^0.2.1" + braces "^1.8.2" + expand-brackets "^0.1.4" + extglob "^0.3.1" + filename-regex "^2.0.0" + is-extglob "^1.0.0" + is-glob "^2.0.1" + kind-of "^3.0.2" + normalize-path "^2.0.1" + object.omit "^2.0.0" + parse-glob "^3.0.4" + regex-cache "^0.4.2" + +mime-db@~1.12.0: + version "1.12.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.12.0.tgz#3d0c63180f458eb10d325aaa37d7c58ae312e9d7" + +mime-db@~1.26.0: + version "1.26.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.26.0.tgz#eaffcd0e4fc6935cf8134da246e2e6c35305adff" + +mime-types@^2.1.12, mime-types@~2.1.7: + version "2.1.14" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.14.tgz#f7ef7d97583fcaf3b7d282b6f8b5679dab1e94ee" + dependencies: + mime-db "~1.26.0" + +mime-types@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-1.0.2.tgz#995ae1392ab8affcbfcb2641dd054e943c0d5dce" + +mime-types@~2.0.3: + version "2.0.14" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.0.14.tgz#310e159db23e077f8bb22b748dabfa4957140aa6" + dependencies: + mime-db "~1.12.0" + +mime@~1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.2.11.tgz#58203eed86e3a5ef17aed2b7d9ebd47f0a60dd10" + +minimatch@0.3: + version "0.3.0" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-0.3.0.tgz#275d8edaac4f1bb3326472089e7949c8394699dd" + dependencies: + lru-cache "2" + sigmund "~1.0.0" + +minimatch@3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.2.tgz#0f398a7300ea441e9c348c83d98ab8c9dbf9c40a" + dependencies: + brace-expansion "^1.0.0" + +minimatch@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-1.0.0.tgz#e0dd2120b49e1b724ce8d714c520822a9438576d" + dependencies: + lru-cache "2" + sigmund "~1.0.0" + +minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.3.tgz#2a4e4090b96b2db06a9d7df01055a62a77c9b774" + dependencies: + brace-expansion "^1.0.0" + +minimatch@~0.2.11, minimatch@~0.2.12: + version 
"0.2.14" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-0.2.14.tgz#c74e780574f63c6f9a090e90efbe6ef53a6a756a" + dependencies: + lru-cache "2" + sigmund "~1.0.0" + +minimist@0.0.8, minimist@~0.0.1: + version "0.0.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" + +minimist@^1.1.0, minimist@^1.1.3, minimist@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" + +mkdirp@0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.0.tgz#1d73076a6df986cd9344e15e71fcc05a4c9abf12" + dependencies: + minimist "0.0.8" + +"mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" + dependencies: + minimist "0.0.8" + +mkdirp@~0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.3.5.tgz#de3e5f8961c88c787ee1368df849ac4413eca8d7" + +mkpath@~0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/mkpath/-/mkpath-0.1.0.tgz#7554a6f8d871834cc97b5462b122c4c124d6de91" + +mout@~0.9.0: + version "0.9.1" + resolved "https://registry.yarnpkg.com/mout/-/mout-0.9.1.tgz#84f0f3fd6acc7317f63de2affdcc0cee009b0477" + +ms@0.7.1: + version "0.7.1" + resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.1.tgz#9cd13c03adbff25b65effde7ce864ee952017098" + +ms@0.7.2: + version "0.7.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765" + +multimatch@^2.0.0, multimatch@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/multimatch/-/multimatch-2.1.0.tgz#9c7906a22fb4c02919e2f5f75161b4cdbd4b2a2b" + dependencies: + array-differ "^1.0.0" + array-union "^1.0.1" + arrify "^1.0.0" + minimatch "^3.0.0" + +mute-stream@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.4.tgz#a9219960a6d5d5d046597aee51252c6655f7177e" + +mute-stream@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.5.tgz#8fbfabb0a98a253d3184331f9e8deb7372fac6c0" + +mute-stream@0.0.6, mute-stream@~0.0.4: + version "0.0.6" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.6.tgz#48962b19e169fd1dfc240b3f1e7317627bbc47db" + +natives@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/natives/-/natives-1.1.0.tgz#e9ff841418a6b2ec7a495e939984f78f163e6e31" + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + +nconf@^0.7.2: + version "0.7.2" + resolved "https://registry.yarnpkg.com/nconf/-/nconf-0.7.2.tgz#a05fdf22dc01c378dd5c4df27f2dc90b9aa8bb00" + dependencies: + async "~0.9.0" + ini "1.x.x" + yargs "~3.15.0" + +nested-error-stacks@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/nested-error-stacks/-/nested-error-stacks-1.0.2.tgz#19f619591519f096769a5ba9a86e6eeec823c3cf" + dependencies: + inherits "~2.0.1" + +next-tick@~0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-0.2.2.tgz#75da4a927ee5887e39065880065b7336413b310d" + +node-status-codes@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-status-codes/-/node-status-codes-1.0.0.tgz#5ae5541d024645d32a58fcddc9ceecea7ae3ac2f" + +node-uuid@~1.4.0: + version "1.4.7" + resolved 
"https://registry.yarnpkg.com/node-uuid/-/node-uuid-1.4.7.tgz#6da5a17668c4b3dd59623bda11cf7fa4c1f60a6f" + +nopt@~1.0.10: + version "1.0.10" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee" + dependencies: + abbrev "1" + +nopt@~2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-2.2.1.tgz#2aa09b7d1768487b3b89a9c5aa52335bff0baea7" + dependencies: + abbrev "1" + +nopt@~3.0.0, nopt@~3.0.1: + version "3.0.6" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" + dependencies: + abbrev "1" + +normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: + version "2.3.5" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.3.5.tgz#8d924f142960e1777e7ffe170543631cc7cb02df" + dependencies: + hosted-git-info "^2.1.4" + is-builtin-module "^1.0.0" + semver "2 || 3 || 4 || 5" + validate-npm-package-license "^3.0.1" + +normalize-path@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.0.1.tgz#47886ac1662760d4261b7d979d241709d3ce3f7a" + +normalize-range@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" + +normalize-selector@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/normalize-selector/-/normalize-selector-0.2.0.tgz#d0b145eb691189c63a78d201dc4fdb1293ef0c03" + +npm-run-path@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + dependencies: + path-key "^2.0.0" + +npmconf@^2.0.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/npmconf/-/npmconf-2.1.2.tgz#66606a4a736f1e77a059aa071a79c94ab781853a" + dependencies: + config-chain "~1.1.8" + inherits "~2.0.0" + ini "^1.2.0" + mkdirp "^0.5.0" + nopt "~3.0.1" + once "~1.3.0" + osenv "^0.1.0" + semver "2 || 3 || 4" + uid-number "0.0.5" + +num2fraction@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede" + +number-is-nan@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + +oauth-sign@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.4.0.tgz#f22956f31ea7151a821e5f2fb32c113cad8b9f69" + +oauth-sign@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.5.0.tgz#d767f5169325620eab2e087ef0c472e773db6461" + +oauth-sign@~0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43" + +obj-props@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/obj-props/-/obj-props-1.1.0.tgz#626313faa442befd4a44e9a02c3cb6bde937b511" + +object-assign@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-0.3.1.tgz#060e2a2a27d7c0d77ec77b78f11aa47fd88008d2" + +object-assign@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-1.0.0.tgz#e65dc8766d3b47b4b8307465c8311da030b070a6" + +object-assign@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-2.1.1.tgz#43c36e5d569ff8e4816c4efa8be02d26967c18aa" + +object-assign@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/object-assign/-/object-assign-3.0.0.tgz#9bedd5ca0897949bca47e7ff408062d549f587f2" + +object-assign@^4.0.1, object-assign@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + +object.omit@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa" + dependencies: + for-own "^0.1.4" + is-extendable "^0.1.1" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + dependencies: + wrappy "1" + +once@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.2.0.tgz#de1905c636af874a8fba862d9aabddd1f920461c" + +once@~1.3.0: + version "1.3.3" + resolved "https://registry.yarnpkg.com/once/-/once-1.3.3.tgz#b2e261557ce4c314ec8304f3fa82663e4297ca20" + dependencies: + wrappy "1" + +onecolor@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/onecolor/-/onecolor-3.0.4.tgz#75a46f80da6c7aaa5b4daae17a47198bd9652494" + +onetime@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-1.1.0.tgz#a1f7838f8314c516f05ecefcbc4ccfe04b4ed789" + +open@^0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/open/-/open-0.0.5.tgz#42c3e18ec95466b6bf0dc42f3a2945c3f0cad8fc" + +opn@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/opn/-/opn-1.0.2.tgz#b909643346d00a1abc977a8b96f3ce3c53d5cf5f" + +optimist@~0.3, optimist@~0.3.5: + version "0.3.7" + resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.3.7.tgz#c90941ad59e4273328923074d2cf2e7cbc6ec0d9" + dependencies: + wordwrap "~0.0.2" + +optimist@~0.6.0: + version "0.6.1" + resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" + dependencies: + minimist "~0.0.1" + wordwrap "~0.0.2" + +optionator@^0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.4" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + wordwrap "~1.0.0" + +os-homedir@^1.0.0, os-homedir@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + +os-locale@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-1.4.0.tgz#20f9f17ae29ed345e8bde583b13d2009803c14d9" + dependencies: + lcid "^1.0.0" + +os-name@^1.0.0, os-name@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/os-name/-/os-name-1.0.3.tgz#1b379f64835af7c5a7f498b357cb95215c159edf" + dependencies: + osx-release "^1.0.0" + win-release "^1.0.0" + +os-tmpdir@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + +osenv@0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.0.3.tgz#cd6ad8ddb290915ad9e22765576025d411f29cb6" + +osenv@0.1.0, osenv@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.0.tgz#61668121eec584955030b9f470b1d2309504bfcb" + +osx-release@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/osx-release/-/osx-release-1.1.0.tgz#f217911a28136949af1bf9308b241e2737d3cd6c" + dependencies: + minimist "^1.1.0" + +p-finally@^1.0.0: + version "1.0.0" + 
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + +p-limit@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.1.0.tgz#b07ff2d9a5d88bec806035895a2bab66a27988bc" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + dependencies: + p-limit "^1.1.0" + +p-throttler@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/p-throttler/-/p-throttler-0.1.0.tgz#1b16907942c333e6f1ddeabcb3479204b8c417c4" + dependencies: + q "~0.9.2" + +package-json@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/package-json/-/package-json-0.2.0.tgz#0316e177b8eb149985d34f706b4a5543b274bec5" + dependencies: + got "^0.3.0" + registry-url "^0.1.0" + +package-json@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/package-json/-/package-json-1.2.0.tgz#c8ecac094227cdf76a316874ed05e27cc939a0e0" + dependencies: + got "^3.2.0" + registry-url "^3.0.0" + +package-json@^2.0.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/package-json/-/package-json-2.4.0.tgz#0d15bd67d1cbbddbb2ca222ff2edb86bcb31a8bb" + dependencies: + got "^5.0.0" + registry-auth-token "^3.0.1" + registry-url "^3.0.3" + semver "^5.1.0" + +pako@~0.2.0: + version "0.2.9" + resolved "https://registry.yarnpkg.com/pako/-/pako-0.2.9.tgz#f3f7522f4ef782348da8161bad9ecfd51bf83a75" + +parse-gitignore@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/parse-gitignore/-/parse-gitignore-0.3.1.tgz#09adda265a4a5be2ce5e905b95a02f7f0e0044fa" + dependencies: + array-unique "^0.2.1" + is-glob "^2.0.1" + +parse-glob@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" + dependencies: + glob-base "^0.3.0" + is-dotfile "^1.0.0" + is-extglob "^1.0.0" + is-glob "^2.0.0" + +parse-json@^2.1.0, parse-json@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" + dependencies: + error-ex "^1.2.0" + +path-exists@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" + dependencies: + pinkie-promise "^2.0.0" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + +path-is-inside@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" + +path-key@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + +path-type@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" + dependencies: + graceful-fs "^4.1.2" + pify "^2.0.0" + pinkie-promise "^2.0.0" + +path-type@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73" + dependencies: + pify "^2.0.0" + +pify@^2.0.0: + version "2.3.0" + resolved 
"https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + +pinkie-promise@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + dependencies: + pinkie "^2.0.0" + +pinkie@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + +pipetteur@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/pipetteur/-/pipetteur-2.0.3.tgz#1955760959e8d1a11cb2a50ec83eec470633e49f" + dependencies: + onecolor "^3.0.4" + synesthesia "^1.0.1" + +pkg-conf@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pkg-conf/-/pkg-conf-2.0.0.tgz#071c87650403bccfb9c627f58751bfe47c067279" + dependencies: + find-up "^2.0.0" + load-json-file "^2.0.0" + +pkg-dir@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-1.0.0.tgz#7a4b508a8d5bb2d629d447056ff4e9c9314cf3d4" + dependencies: + find-up "^1.0.0" + +pkg-up@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-1.0.0.tgz#3e08fb461525c4421624a33b9f7e6d0af5b05a26" + dependencies: + find-up "^1.0.0" + +plur@^2.0.0, plur@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/plur/-/plur-2.1.2.tgz#7482452c1a0f508e3e344eaec312c91c29dc655a" + dependencies: + irregular-plurals "^1.0.0" + +pluralize@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-1.2.1.tgz#d1a21483fd22bb41e58a12fa3421823140897c45" + +postcss-less@^0.14.0: + version "0.14.0" + resolved "https://registry.yarnpkg.com/postcss-less/-/postcss-less-0.14.0.tgz#c631b089c6cce422b9a10f3a958d2bedd3819324" + dependencies: + postcss "^5.0.21" + +postcss-media-query-parser@^0.2.0: + version "0.2.3" + resolved "https://registry.yarnpkg.com/postcss-media-query-parser/-/postcss-media-query-parser-0.2.3.tgz#27b39c6f4d94f81b1a73b8f76351c609e5cef244" + +postcss-reporter@^1.2.1, postcss-reporter@^1.3.3: + version "1.4.1" + resolved "https://registry.yarnpkg.com/postcss-reporter/-/postcss-reporter-1.4.1.tgz#c136f0a5b161915f379dd3765c61075f7e7b9af2" + dependencies: + chalk "^1.0.0" + lodash "^4.1.0" + log-symbols "^1.0.2" + postcss "^5.0.0" + +postcss-reporter@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postcss-reporter/-/postcss-reporter-3.0.0.tgz#09ea0f37a444c5693878606e09b018ebeff7cf8f" + dependencies: + chalk "^1.0.0" + lodash "^4.1.0" + log-symbols "^1.0.2" + postcss "^5.0.0" + +postcss-resolve-nested-selector@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/postcss-resolve-nested-selector/-/postcss-resolve-nested-selector-0.1.1.tgz#29ccbc7c37dedfac304e9fff0bf1596b3f6a0e4e" + +postcss-scss@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/postcss-scss/-/postcss-scss-0.4.1.tgz#ad771b81f0f72f5f4845d08aa60f93557653d54c" + dependencies: + postcss "^5.2.13" + +postcss-selector-parser@^2.0.0, postcss-selector-parser@^2.1.1: + version "2.2.2" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-2.2.2.tgz#3d70f5adda130da51c7c0c2fc023f56b1374fe08" + dependencies: + flatten "^1.0.2" + indexes-of "^1.0.1" + uniq "^1.0.1" + +postcss-value-parser@^3.1.1, postcss-value-parser@^3.2.3: + version "3.3.0" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.0.tgz#87f38f9f18f774a4ab4c8a232f5c5ce8872a9d15" + +postcss@^5.0.0, postcss@^5.0.18, postcss@^5.0.20, postcss@^5.0.21, 
postcss@^5.0.4, postcss@^5.0.8, postcss@^5.2.13, postcss@^5.2.15, postcss@^5.2.4: + version "5.2.15" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-5.2.15.tgz#a9e8685e50e06cc5b3fdea5297273246c26f5b30" + dependencies: + chalk "^1.1.3" + js-base64 "^2.1.9" + source-map "^0.5.6" + supports-color "^3.2.3" + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + +prepend-http@^1.0.0, prepend-http@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" + +preserve@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b" + +pretty-bytes@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-1.0.4.tgz#0a22e8210609ad35542f8c8d5d2159aff0751c84" + dependencies: + get-stdin "^4.0.1" + meow "^3.1.0" + +pretty-bytes@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-3.0.1.tgz#27d0008d778063a0b4811bb35c79f1bd5d5fbccf" + dependencies: + number-is-nan "^1.0.0" + +process-nextick-args@~1.0.6: + version "1.0.7" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3" + +progress@^1.1.8: + version "1.1.8" + resolved "https://registry.yarnpkg.com/progress/-/progress-1.1.8.tgz#e260c78f6161cdd9b0e56cc3e0a85de17c7a57be" + +"promise@>=3.2 <8": + version "7.1.1" + resolved "https://registry.yarnpkg.com/promise/-/promise-7.1.1.tgz#489654c692616b8aa55b0724fa809bb7db49c5bf" + dependencies: + asap "~2.0.3" + +promptly@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/promptly/-/promptly-0.2.0.tgz#73ef200fa8329d5d3a8df41798950b8646ca46d9" + dependencies: + read "~1.0.4" + +proto-list@~1.2.1: + version "1.2.4" + resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" + +proto-props@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/proto-props/-/proto-props-0.2.1.tgz#5e01dc2675a0de9abfa76e799dfa334d6f483f4b" + +pseudomap@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" + +pump@^0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/pump/-/pump-0.3.5.tgz#ae5ff8c1f93ed87adc6530a97565b126f585454b" + dependencies: + end-of-stream "~1.0.0" + once "~1.2.0" + +punycode@1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" + +punycode@>=0.2.0, punycode@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + +q@~0.9.2: + version "0.9.7" + resolved "https://registry.yarnpkg.com/q/-/q-0.9.7.tgz#4de2e6cb3b29088c9e4cbc03bf9d42fb96ce2f75" + +q@~1.0.0, q@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/q/-/q-1.0.1.tgz#11872aeedee89268110b10a718448ffb10112a14" + +qs@~1.2.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-1.2.2.tgz#19b57ff24dc2a99ce1f8bdf6afcda59f8ef61f88" + +qs@~2.3.1: + version "2.3.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-2.3.3.tgz#e9e85adbe75da0bbe4c8e0476a086290f863b404" + +qs@~6.3.0: + version "6.3.1" + resolved 
"https://registry.yarnpkg.com/qs/-/qs-6.3.1.tgz#918c0b3bcd36679772baf135b1acb4c1651ed79d" + +querystring@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" + +randomatic@^1.1.3: + version "1.1.6" + resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.6.tgz#110dcabff397e9dcff7c0789ccc0a49adf1ec5bb" + dependencies: + is-number "^2.0.2" + kind-of "^3.0.2" + +rc@^1.0.1, rc@^1.1.6: + version "1.1.7" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.1.7.tgz#c5ea564bb07aff9fd3a5b32e906c1d3a65940fea" + dependencies: + deep-extend "~0.4.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +read-all-stream@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/read-all-stream/-/read-all-stream-3.1.0.tgz#35c3e177f2078ef789ee4bfafa4373074eaef4fa" + dependencies: + pinkie-promise "^2.0.0" + readable-stream "^2.0.0" + +read-file-stdin@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/read-file-stdin/-/read-file-stdin-0.2.1.tgz#25eccff3a153b6809afacb23ee15387db9e0ee61" + dependencies: + gather-stream "^1.0.0" + +read-pkg-up@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" + dependencies: + find-up "^1.0.0" + read-pkg "^1.0.0" + +read-pkg-up@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be" + dependencies: + find-up "^2.0.0" + read-pkg "^2.0.0" + +read-pkg@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" + dependencies: + load-json-file "^1.0.0" + normalize-package-data "^2.3.2" + path-type "^1.0.0" + +read-pkg@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" + dependencies: + load-json-file "^2.0.0" + normalize-package-data "^2.3.2" + path-type "^2.0.0" + +read@~1.0.4: + version "1.0.7" + resolved "https://registry.yarnpkg.com/read/-/read-1.0.7.tgz#b3da19bd052431a97671d44a42634adf710b40c4" + dependencies: + mute-stream "~0.0.4" + +"readable-stream@>=1.0.33-1 <1.1.0-0", readable-stream@~1.0.26: + version "1.0.34" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + +readable-stream@^1.0.27-1, readable-stream@^1.0.33, readable-stream@~1.1.8, readable-stream@~1.1.9: + version "1.1.14" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + +readable-stream@^2.0.0, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.2.2: + version "2.2.3" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.2.3.tgz#9cf49463985df016c8ae8813097a9293a9b33729" + dependencies: + buffer-shims "^1.0.0" + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "~1.0.0" + process-nextick-args "~1.0.6" + string_decoder "~0.10.x" + util-deprecate "~1.0.1" + +readline2@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/readline2/-/readline2-1.0.1.tgz#41059608ffc154757b715d9989d199ffbf372e35" + dependencies: + 
code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + mute-stream "0.0.5" + +readline2@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/readline2/-/readline2-0.1.1.tgz#99443ba6e83b830ef3051bfd7dc241a82728d568" + dependencies: + mute-stream "0.0.4" + strip-ansi "^2.0.1" + +rechoir@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" + dependencies: + resolve "^1.1.6" + +redent@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" + dependencies: + indent-string "^2.1.0" + strip-indent "^1.0.1" + +redeyed@~0.4.0: + version "0.4.4" + resolved "https://registry.yarnpkg.com/redeyed/-/redeyed-0.4.4.tgz#37e990a6f2b21b2a11c2e6a48fd4135698cba97f" + dependencies: + esprima "~1.0.4" + +regex-cache@^0.4.2: + version "0.4.3" + resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.3.tgz#9b1a6c35d4d0dfcef5711ae651e8e9d3d7114145" + dependencies: + is-equal-shallow "^0.1.3" + is-primitive "^2.0.0" + +registry-auth-token@^3.0.1: + version "3.1.0" + resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-3.1.0.tgz#997c08256e0c7999837b90e944db39d8a790276b" + dependencies: + rc "^1.1.6" + +registry-url@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-0.1.1.tgz#1739427b81b110b302482a1c7cd727ffcc82d5be" + dependencies: + npmconf "^2.0.1" + +registry-url@^3.0.0, registry-url@^3.0.3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-3.1.0.tgz#3d4ef870f73dde1d77f0cf9a381432444e174942" + dependencies: + rc "^1.0.1" + +repeat-element@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.2.tgz#ef089a178d1483baae4d93eb98b4f9e4e11d990a" + +repeat-string@^1.5.2: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + +repeating@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/repeating/-/repeating-1.1.3.tgz#3d4114218877537494f97f77f9785fab810fa4ac" + dependencies: + is-finite "^1.0.0" + +repeating@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" + dependencies: + is-finite "^1.0.0" + +req-all@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/req-all/-/req-all-0.1.0.tgz#130051e2ace58a02eacbfc9d448577a736a9273a" + +request-progress@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/request-progress/-/request-progress-0.3.0.tgz#bdf2062bfc197c5d492500d44cb3aff7865b492e" + dependencies: + throttleit "~0.0.2" + +request-replay@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/request-replay/-/request-replay-0.2.0.tgz#9b693a5d118b39f5c596ead5ed91a26444057f60" + dependencies: + retry "~0.6.0" + +request@^2.40.0, request@^2.74.0: + version "2.79.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.79.0.tgz#4dfe5bf6be8b8cdc37fcf93e04b65577722710de" + dependencies: + aws-sign2 "~0.6.0" + aws4 "^1.2.1" + caseless "~0.11.0" + combined-stream "~1.0.5" + extend "~3.0.0" + forever-agent "~0.6.1" + form-data "~2.1.1" + har-validator "~2.0.6" + hawk "~3.1.3" + http-signature "~1.1.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.7" + oauth-sign "~0.8.1" + qs "~6.3.0" + stringstream 
"~0.0.4" + tough-cookie "~2.3.0" + tunnel-agent "~0.4.1" + uuid "^3.0.0" + +request@~2.42.0: + version "2.42.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.42.0.tgz#572bd0148938564040ac7ab148b96423a063304a" + dependencies: + bl "~0.9.0" + caseless "~0.6.0" + forever-agent "~0.5.0" + json-stringify-safe "~5.0.0" + mime-types "~1.0.1" + node-uuid "~1.4.0" + qs "~1.2.0" + tunnel-agent "~0.4.0" + optionalDependencies: + aws-sign2 "~0.5.0" + form-data "~0.1.0" + hawk "1.1.1" + http-signature "~0.10.0" + oauth-sign "~0.4.0" + stringstream "~0.0.4" + tough-cookie ">=0.12.0" + +request@~2.51.0: + version "2.51.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.51.0.tgz#35d00bbecc012e55f907b1bd9e0dbd577bfef26e" + dependencies: + aws-sign2 "~0.5.0" + bl "~0.9.0" + caseless "~0.8.0" + combined-stream "~0.0.5" + forever-agent "~0.5.0" + form-data "~0.2.0" + hawk "1.1.1" + http-signature "~0.10.0" + json-stringify-safe "~5.0.0" + mime-types "~1.0.1" + node-uuid "~1.4.0" + oauth-sign "~0.5.0" + qs "~2.3.1" + stringstream "~0.0.4" + tough-cookie ">=0.12.0" + tunnel-agent "~0.4.0" + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + +require-from-string@^1.1.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-1.2.1.tgz#529c9ccef27380adfec9a2f965b649bbee636418" + +require-main-filename@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" + +require-uncached@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/require-uncached/-/require-uncached-1.0.3.tgz#4e0d56d6c9662fd31e43011c4b95aa49955421d3" + dependencies: + caller-path "^0.1.0" + resolve-from "^1.0.0" + +resolve-cwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-1.0.0.tgz#4eaeea41ed040d1702457df64a42b2b07d246f9f" + dependencies: + resolve-from "^2.0.0" + +resolve-from@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226" + +resolve-from@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-2.0.0.tgz#9480ab20e94ffa1d9e80a804c7ea147611966b57" + +resolve-pkg@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/resolve-pkg/-/resolve-pkg-0.1.0.tgz#02cc993410e2936962bd97166a1b077da9725531" + dependencies: + resolve-from "^2.0.0" + +resolve@^1.1.6: + version "1.2.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.2.0.tgz#9589c3f2f6149d1417a40becc1663db6ec6bc26c" + +resolve@~0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-0.3.1.tgz#34c63447c664c70598d1c9b126fc43b2a24310a4" + +restore-cursor@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-1.0.1.tgz#34661f46886327fed2991479152252df92daa541" + dependencies: + exit-hook "^1.0.0" + onetime "^1.0.0" + +retry@0.6.0, retry@~0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/retry/-/retry-0.6.0.tgz#1c010713279a6fd1e8def28af0c3ff1871caa537" + +right-align@^0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" + dependencies: + align-text "^0.1.1" + +rimraf@2, rimraf@^2.2.8, rimraf@^2.5.1: + version 
"2.6.0" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.0.tgz#89b8a0fe432b9ff9ec9a925a00b6cdb3a91bbada" + dependencies: + glob "^7.0.5" + +rimraf@~2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.0.3.tgz#f50a2965e7144e9afd998982f15df706730f56a9" + optionalDependencies: + graceful-fs "~1.1" + +rimraf@~2.2.0, rimraf@~2.2.8: + version "2.2.8" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.2.8.tgz#e439be2aaee327321952730f99a8929e4fc50582" + +run-async@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-0.1.0.tgz#c8ad4a5e110661e402a7d21b530e009f25f8e389" + dependencies: + once "^1.3.0" + +run-async@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.3.0.tgz#0371ab4ae0bdd720d4166d7dfda64ff7a445a6c0" + dependencies: + is-promise "^2.1.0" + +rx-lite@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-3.1.2.tgz#19ce502ca572665f3b647b10939f97fd1615f102" + +rx@^2.2.27: + version "2.5.3" + resolved "https://registry.yarnpkg.com/rx/-/rx-2.5.3.tgz#21adc7d80f02002af50dae97fd9dbf248755f566" + +rx@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/rx/-/rx-4.1.0.tgz#a5f13ff79ef3b740fe30aa803fb09f98805d4782" + +semver-diff@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-0.1.0.tgz#4f6057ca3eba23cc484b51f64aaf88b131a3855d" + dependencies: + semver "^2.2.1" + +semver-diff@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36" + dependencies: + semver "^5.0.3" + +"semver@2 || 3 || 4", semver@^2.2.1, semver@~2.3.0: + version "2.3.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-2.3.2.tgz#b9848f25d6cf36333073ec9ef8856d42f1233e52" + +"semver@2 || 3 || 4 || 5", semver@^5.0.1, semver@^5.0.3, semver@^5.1.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" + +set-blocking@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + +shell-quote@~1.4.1: + version "1.4.3" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.4.3.tgz#952c44e0b1ed9013ef53958179cc643e8777466b" + dependencies: + array-filter "~0.0.0" + array-map "~0.0.0" + array-reduce "~0.0.0" + jsonify "~0.0.0" + +shelljs@^0.7.5: + version "0.7.6" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.7.6.tgz#379cccfb56b91c8601e4793356eb5382924de9ad" + dependencies: + glob "^7.0.0" + interpret "^1.0.0" + rechoir "^0.6.2" + +sigmund@~1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/sigmund/-/sigmund-1.0.1.tgz#3ff21f198cad2175f9f3b781853fd94d0d19b590" + +signal-exit@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" + +slice-ansi@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35" + +slide@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/slide/-/slide-1.1.6.tgz#56eb027d65b4d2dce6cb2e2d32c4d4afc9e1d707" + +sntp@0.2.x: + version "0.2.4" + resolved "https://registry.yarnpkg.com/sntp/-/sntp-0.2.4.tgz#fb885f18b0f3aad189f824862536bceeec750900" + dependencies: + hoek "0.9.x" + +sntp@1.x.x: + version "1.0.9" + resolved 
"https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198" + dependencies: + hoek "2.x.x" + +snyk-config@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/snyk-config/-/snyk-config-1.0.1.tgz#f27aec2498b24027ac719214026521591111508f" + dependencies: + debug "^2.2.0" + nconf "^0.7.2" + path-is-absolute "^1.0.0" + +snyk-module@1.7.0, snyk-module@^1.6.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/snyk-module/-/snyk-module-1.7.0.tgz#07c6ca8556d281de6f9e2368c04ecb6dd1f2631a" + dependencies: + debug "^2.2.0" + hosted-git-info "^2.1.4" + validate-npm-package-name "^2.2.2" + +snyk-policy@1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/snyk-policy/-/snyk-policy-1.7.0.tgz#2151c751ab1edc040fc6b94a872aa989db492324" + dependencies: + debug "^2.2.0" + es6-promise "^3.1.2" + js-yaml "^3.5.3" + lodash.clonedeep "^4.3.1" + semver "^5.1.0" + snyk-module "^1.6.0" + snyk-resolve "^1.0.0" + snyk-try-require "^1.1.1" + then-fs "^2.0.0" + +snyk-recursive-readdir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/snyk-recursive-readdir/-/snyk-recursive-readdir-2.0.0.tgz#5cb59e94698169e0205a60e7d6a506d0b4d52ff3" + dependencies: + minimatch "3.0.2" + +snyk-resolve-deps@1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/snyk-resolve-deps/-/snyk-resolve-deps-1.7.0.tgz#13743a058437dff890baaf437c333c966a743cb6" + dependencies: + abbrev "^1.0.7" + ansicolors "^0.3.2" + clite "^0.3.0" + debug "^2.2.0" + es6-promise "^3.0.2" + lodash "^4.0.0" + lru-cache "^4.0.0" + minimist "^1.2.0" + semver "^5.1.0" + snyk-module "^1.6.0" + snyk-resolve "^1.0.0" + snyk-tree "^1.0.0" + snyk-try-require "^1.1.1" + then-fs "^2.0.0" + +snyk-resolve@1.0.0, snyk-resolve@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/snyk-resolve/-/snyk-resolve-1.0.0.tgz#bbe9196d37f57c39251e6be75ccdd5b2097e99a2" + dependencies: + debug "^2.2.0" + then-fs "^2.0.0" + +snyk-tree@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/snyk-tree/-/snyk-tree-1.0.0.tgz#0fb73176dbf32e782f19100294160448f9111cc8" + dependencies: + archy "^1.0.0" + +snyk-try-require@^1.1.1, snyk-try-require@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/snyk-try-require/-/snyk-try-require-1.2.0.tgz#30fc2b11c07064591ee35780c826be91312f2144" + dependencies: + debug "^2.2.0" + es6-promise "^3.1.2" + lodash.clonedeep "^4.3.0" + lru-cache "^4.0.0" + then-fs "^2.0.0" + +snyk@^1.9.1: + version "1.25.1" + resolved "https://registry.yarnpkg.com/snyk/-/snyk-1.25.1.tgz#a034bbff29bb58f0ff7609aa8c90540a803a7b4a" + dependencies: + abbrev "^1.0.7" + ansi-escapes "^1.3.0" + chalk "^1.1.1" + configstore "^1.2.0" + debug "^2.2.0" + es6-promise "^3.0.2" + hasbin "^1.2.3" + inquirer "1.0.3" + open "^0.0.5" + os-name "^1.0.3" + request "^2.74.0" + semver "^5.1.0" + snyk-config "1.0.1" + snyk-module "1.7.0" + snyk-policy "1.7.0" + snyk-recursive-readdir "^2.0.0" + snyk-resolve "1.0.0" + snyk-resolve-deps "1.7.0" + snyk-tree "^1.0.0" + snyk-try-require "^1.2.0" + tempfile "^1.1.1" + then-fs "^2.0.0" + undefsafe "0.0.3" + update-notifier "^0.5.0" + url "^0.11.0" + uuid "^3.0.1" + +sort-keys@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad" + dependencies: + is-plain-obj "^1.0.0" + +source-map@0.4.x, source-map@^0.4.2: + version "0.4.4" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" + 
dependencies: + amdefine ">=0.0.4" + +source-map@^0.5.6, source-map@~0.5.1: + version "0.5.6" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412" + +source-map@~0.1.7: + version "0.1.43" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.1.43.tgz#c24bc146ca517c1471f5dacbe2571b2b7f9e3346" + dependencies: + amdefine ">=0.0.4" + +spdx-correct@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-1.0.2.tgz#4b3073d933ff51f3912f03ac5519498a4150db40" + dependencies: + spdx-license-ids "^1.0.2" + +spdx-expression-parse@~1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz#9bdf2f20e1f40ed447fbe273266191fced51626c" + +spdx-license-ids@^1.0.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz#c9df7a3424594ade6bd11900d596696dc06bac57" + +specificity@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/specificity/-/specificity-0.3.0.tgz#332472d4e5eb5af20821171933998a6bc3b1ce6f" + +split2@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/split2/-/split2-0.2.1.tgz#02ddac9adc03ec0bb78c1282ec079ca6e85ae900" + dependencies: + through2 "~0.6.1" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + +sshpk@^1.7.0: + version "1.10.2" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.10.2.tgz#d5a804ce22695515638e798dbe23273de070a5fa" + dependencies: + asn1 "~0.2.3" + assert-plus "^1.0.0" + dashdash "^1.12.0" + getpass "^0.1.1" + optionalDependencies: + bcrypt-pbkdf "^1.0.0" + ecc-jsbn "~0.1.1" + jodid25519 "^1.0.0" + jsbn "~0.1.0" + tweetnacl "~0.14.0" + +stream-combiner@^0.2.1: + version "0.2.2" + resolved "https://registry.yarnpkg.com/stream-combiner/-/stream-combiner-0.2.2.tgz#aec8cbac177b56b6f4fa479ced8c1912cee52858" + dependencies: + duplexer "~0.1.1" + through "~2.3.4" + +stream-shift@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" + +string-length@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-0.1.2.tgz#ab04bb33867ee74beed7fb89bb7f089d392780f2" + dependencies: + strip-ansi "^0.2.1" + +string-length@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-1.0.1.tgz#56970fb1c38558e9e70b728bf3de269ac45adfac" + dependencies: + strip-ansi "^3.0.0" + +string-width@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + strip-ansi "^3.0.0" + +string-width@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.0.0.tgz#635c5436cc72a6e0c387ceca278d4e2eec52687e" + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^3.0.0" + +string_decoder@~0.10.x: + version "0.10.31" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" + +stringify-object@~1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-1.0.1.tgz#86d35e7dbfbce9aa45637d7ecdd7847e159db8a2" + +stringstream@~0.0.4: + version "0.0.5" + resolved 
"https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878" + +strip-ansi@^0.2.1: + version "0.2.2" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-0.2.2.tgz#854d290c981525fc8c397a910b025ae2d54ffc08" + dependencies: + ansi-regex "^0.1.0" + +strip-ansi@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-0.3.0.tgz#25f48ea22ca79187f3174a4db8759347bb126220" + dependencies: + ansi-regex "^0.2.1" + +strip-ansi@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-2.0.1.tgz#df62c1aa94ed2f114e1d0f21fd1d50482b79a60e" + dependencies: + ansi-regex "^1.0.0" + +strip-ansi@^3.0.0, strip-ansi@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + dependencies: + ansi-regex "^2.0.0" + +strip-bom@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" + dependencies: + is-utf8 "^0.2.0" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + +strip-eof@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + +strip-indent@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" + dependencies: + get-stdin "^4.0.1" + +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + +style-search@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/style-search/-/style-search-0.1.0.tgz#7958c793e47e32e07d2b5cafe5c0bf8e12e77902" + +stylehacks@^2.3.0: + version "2.3.2" + resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-2.3.2.tgz#64c83e0438a68c9edf449e8c552a7d9ab6009b0b" + dependencies: + browserslist "^1.1.3" + chalk "^1.1.1" + log-symbols "^1.0.2" + minimist "^1.2.0" + plur "^2.1.2" + postcss "^5.0.18" + postcss-reporter "^1.3.3" + postcss-selector-parser "^2.0.0" + read-file-stdin "^0.2.1" + text-table "^0.2.0" + write-file-stdout "0.0.2" + +stylelint@^7.2.0: + version "7.9.0" + resolved "https://registry.yarnpkg.com/stylelint/-/stylelint-7.9.0.tgz#b8d9ea20f887ab351075c6aded9528de24509327" + dependencies: + autoprefixer "^6.0.0" + balanced-match "^0.4.0" + chalk "^1.1.1" + colorguard "^1.2.0" + cosmiconfig "^2.1.1" + doiuse "^2.4.1" + execall "^1.0.0" + get-stdin "^5.0.0" + globby "^6.0.0" + globjoin "^0.1.4" + html-tags "^1.1.1" + ignore "^3.2.0" + known-css-properties "^0.0.6" + lodash "^4.17.4" + log-symbols "^1.0.2" + meow "^3.3.0" + micromatch "^2.3.11" + normalize-selector "^0.2.0" + postcss "^5.0.20" + postcss-less "^0.14.0" + postcss-media-query-parser "^0.2.0" + postcss-reporter "^3.0.0" + postcss-resolve-nested-selector "^0.1.1" + postcss-scss "^0.4.0" + postcss-selector-parser "^2.1.1" + postcss-value-parser "^3.1.1" + resolve-from "^2.0.0" + specificity "^0.3.0" + string-width "^2.0.0" + style-search "^0.1.0" + stylehacks "^2.3.0" + sugarss "^0.2.0" + svg-tags "^1.0.0" + table "^4.0.1" + +sugarss@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/sugarss/-/sugarss-0.2.0.tgz#ac34237563327c6ff897b64742bf6aec190ad39e" + dependencies: + postcss "^5.2.4" + 
+supports-color@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-0.2.0.tgz#d92de2694eb3f67323973d7ae3d8b55b4c22190a" + +supports-color@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + +supports-color@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" + dependencies: + has-flag "^1.0.0" + +svg-tags@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/svg-tags/-/svg-tags-1.0.0.tgz#58f71cee3bd519b59d4b2a843b6c7de64ac04764" + +synesthesia@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/synesthesia/-/synesthesia-1.0.1.tgz#5ef95ea548c0d5c6e6f9bb4b0d0731dff864a777" + dependencies: + css-color-names "0.0.3" + +table@^3.7.8: + version "3.8.3" + resolved "https://registry.yarnpkg.com/table/-/table-3.8.3.tgz#2bbc542f0fda9861a755d3947fefd8b3f513855f" + dependencies: + ajv "^4.7.0" + ajv-keywords "^1.0.0" + chalk "^1.1.1" + lodash "^4.0.0" + slice-ansi "0.0.4" + string-width "^2.0.0" + +table@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/table/-/table-4.0.1.tgz#a8116c133fac2c61f4a420ab6cdf5c4d61f0e435" + dependencies: + ajv "^4.7.0" + ajv-keywords "^1.0.0" + chalk "^1.1.1" + lodash "^4.0.0" + slice-ansi "0.0.4" + string-width "^2.0.0" + +tar-fs@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-0.5.2.tgz#0f59424be7eeee45232316e302f66d3f6ea6db3e" + dependencies: + mkdirp "^0.5.0" + pump "^0.3.5" + tar-stream "^0.4.6" + +tar-stream@^0.4.6: + version "0.4.7" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-0.4.7.tgz#1f1d2ce9ebc7b42765243ca0e8f1b7bfda0aadcd" + dependencies: + bl "^0.9.0" + end-of-stream "^1.0.0" + readable-stream "^1.0.27-1" + xtend "^4.0.0" + +tempfile@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/tempfile/-/tempfile-1.1.1.tgz#5bcc4eaecc4ab2c707d8bc11d99ccc9a2cb287f2" + dependencies: + os-tmpdir "^1.0.0" + uuid "^2.0.1" + +text-table@^0.2.0, text-table@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + +the-argv@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/the-argv/-/the-argv-1.0.0.tgz#0084705005730dd84db755253c931ae398db9522" + +then-fs@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/then-fs/-/then-fs-2.0.0.tgz#72f792dd9d31705a91ae19ebfcf8b3f968c81da2" + dependencies: + promise ">=3.2 <8" + +throttleit@~0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/throttleit/-/throttleit-0.0.2.tgz#cfedf88e60c00dd9697b61fdd2a8343a9b680eaf" + +through2@^0.6.1, through2@^0.6.3, through2@~0.6.1: + version "0.6.5" + resolved "https://registry.yarnpkg.com/through2/-/through2-0.6.5.tgz#41ab9c67b29d57209071410e1d7a7a968cd3ad48" + dependencies: + readable-stream ">=1.0.33-1 <1.1.0-0" + xtend ">=4.0.0 <4.1.0-0" + +"through@>=2.2.7 <3", through@^2.3.6, through@~2.3.4: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + +timed-out@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-2.0.0.tgz#f38b0ae81d3747d628001f41dafc652ace671c0a" + +timed-out@^3.0.0: + version "3.1.3" + resolved 
"https://registry.yarnpkg.com/timed-out/-/timed-out-3.1.3.tgz#95860bfcc5c76c277f8f8326fd0f5b2e20eba217" + +timers-ext@0.1: + version "0.1.0" + resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.0.tgz#00345a2ca93089d1251322054389d263e27b77e2" + dependencies: + es5-ext "~0.10.2" + next-tick "~0.2.2" + +tmp@0.0.23: + version "0.0.23" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.23.tgz#de874aa5e974a85f0a32cdfdbd74663cb3bd9c74" + +touch@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/touch/-/touch-0.0.2.tgz#a65a777795e5cbbe1299499bdc42281ffb21b5f4" + dependencies: + nopt "~1.0.10" + +tough-cookie@>=0.12.0, tough-cookie@~2.3.0: + version "2.3.2" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.2.tgz#f081f76e4c85720e6c37a5faced737150d84072a" + dependencies: + punycode "^1.4.1" + +tough-cookie@^0.12.1: + version "0.12.1" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-0.12.1.tgz#8220c7e21abd5b13d96804254bd5a81ebf2c7d62" + dependencies: + punycode ">=0.2.0" + +"traverse@>=0.3.0 <0.4": + version "0.3.9" + resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.3.9.tgz#717b8f220cc0bb7b44e40514c22b2e8bbc70d8b9" + +trim-newlines@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" + +tryit@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/tryit/-/tryit-1.0.3.tgz#393be730a9446fd1ead6da59a014308f36c289cb" + +tunnel-agent@~0.4.0, tunnel-agent@~0.4.1: + version "0.4.3" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.4.3.tgz#6373db76909fe570e08d73583365ed828a74eeeb" + +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + dependencies: + prelude-ls "~1.1.2" + +typedarray@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + +uglify-js@~2.3: + version "2.3.6" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.3.6.tgz#fa0984770b428b7a9b2a8058f46355d14fef211a" + dependencies: + async "~0.2.6" + optimist "~0.3.5" + source-map "~0.1.7" + +uglify-js@~2.6.0: + version "2.6.4" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.6.4.tgz#65ea2fb3059c9394692f15fed87c2b36c16b9adf" + dependencies: + async "~0.2.6" + source-map "~0.5.1" + uglify-to-browserify "~1.0.0" + yargs "~3.10.0" + +uglify-to-browserify@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" + +uid-number@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.5.tgz#5a3db23ef5dbd55b81fce0ec9a2ac6fccdebb81e" + +undefsafe@0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/undefsafe/-/undefsafe-0.0.3.tgz#ecca3a03e56b9af17385baac812ac83b994a962f" + +underscore.string@~2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-2.2.1.tgz#d7c0fa2af5d5a1a67f4253daee98132e733f0f19" + +underscore.string@~2.3.3: + version "2.3.3" + resolved 
"https://registry.yarnpkg.com/underscore.string/-/underscore.string-2.3.3.tgz#71c08bf6b428b1133f37e78fa3a21c82f7329b0d" + +underscore.string@~2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-2.4.0.tgz#8cdd8fbac4e2d2ea1e7e2e8097c42f442280f85b" + +underscore.string@~3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-3.2.3.tgz#806992633665d5e5fcb4db1fb3a862eb68e9e6da" + +underscore@~1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.7.0.tgz#6bbaf0877500d36be34ecaa584e0db9fef035209" + +uniq@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" + +unzip-response@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unzip-response/-/unzip-response-1.0.2.tgz#b984f0877fc0a89c2c773cc1ef7b5b232b5b06fe" + +update-notifier@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-0.2.0.tgz#a010c928adcf02090b8e0ce7fef6fb0a7cacc34a" + dependencies: + chalk "^0.5.0" + configstore "^0.3.0" + latest-version "^0.2.0" + semver-diff "^0.1.0" + string-length "^0.1.2" + +update-notifier@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-0.5.0.tgz#07b5dc2066b3627ab3b4f530130f7eddda07a4cc" + dependencies: + chalk "^1.0.0" + configstore "^1.0.0" + is-npm "^1.0.0" + latest-version "^1.0.0" + repeating "^1.1.2" + semver-diff "^2.0.0" + string-length "^1.0.0" + +update-notifier@^0.6.0: + version "0.6.3" + resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-0.6.3.tgz#776dec8daa13e962a341e8a1d98354306b67ae08" + dependencies: + boxen "^0.3.1" + chalk "^1.0.0" + configstore "^2.0.0" + is-npm "^1.0.0" + latest-version "^2.0.0" + semver-diff "^2.0.0" + +update-notifier@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-1.0.3.tgz#8f92c515482bd6831b7c93013e70f87552c7cf5a" + dependencies: + boxen "^0.6.0" + chalk "^1.0.0" + configstore "^2.0.0" + is-npm "^1.0.0" + latest-version "^2.0.0" + lazy-req "^1.1.0" + semver-diff "^2.0.0" + xdg-basedir "^2.0.0" + +uri-path@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/uri-path/-/uri-path-1.0.0.tgz#9747f018358933c31de0fccfd82d138e67262e32" + +url-parse-lax@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73" + dependencies: + prepend-http "^1.0.1" + +url@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" + dependencies: + punycode "1.3.2" + querystring "0.2.0" + +user-home@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/user-home/-/user-home-1.1.1.tgz#2b5be23a32b63a7c9deb8d0f28d485724a3df190" + +user-home@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/user-home/-/user-home-2.0.0.tgz#9c70bfd8169bc1dcbf48604e0f04b8b49cde9e9f" + dependencies: + os-homedir "^1.0.0" + +util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + +uuid@^2.0.1: + version "2.0.3" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-2.0.3.tgz#67e2e863797215530dff318e5bf9dcebfd47b21a" + +uuid@^3.0.0, uuid@^3.0.1: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1" + +validate-npm-package-license@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz#2804babe712ad3379459acfbe24746ab2c303fbc" + dependencies: + spdx-correct "~1.0.0" + spdx-expression-parse "~1.0.0" + +validate-npm-package-name@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/validate-npm-package-name/-/validate-npm-package-name-2.2.2.tgz#f65695b22f7324442019a3c7fa39a6e7fd299085" + dependencies: + builtins "0.0.7" + +verror@1.3.6: + version "1.3.6" + resolved "https://registry.yarnpkg.com/verror/-/verror-1.3.6.tgz#cff5df12946d297d2baaefaa2689e25be01c005c" + dependencies: + extsprintf "1.0.2" + +which-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-1.0.0.tgz#bba63ca861948994ff307736089e3b96026c2a4f" + +which@^1.2.9: + version "1.2.12" + resolved "https://registry.yarnpkg.com/which/-/which-1.2.12.tgz#de67b5e450269f194909ef23ece4ebe416fa1192" + dependencies: + isexe "^1.1.1" + +which@~1.0.5: + version "1.0.9" + resolved "https://registry.yarnpkg.com/which/-/which-1.0.9.tgz#460c1da0f810103d0321a9b633af9e575e64486f" + +widest-line@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-1.0.0.tgz#0c09c85c2a94683d0d7eaf8ee097d564bf0e105c" + dependencies: + string-width "^1.0.1" + +win-release@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/win-release/-/win-release-1.1.1.tgz#5fa55e02be7ca934edfc12665632e849b72e5209" + dependencies: + semver "^5.0.1" + +window-size@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" + +window-size@^0.1.1: + version "0.1.4" + resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.4.tgz#f8e1aa1ee5a53ec5bf151ffa09742a6ad7697876" + +window-size@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.2.0.tgz#b4315bb4214a3d7058ebeee892e13fa24d98b075" + +wordwrap@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" + +wordwrap@~0.0.2: + version "0.0.3" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" + +wordwrap@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + +wrap-ansi@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + +wrench@~1.4.3: + version "1.4.4" + resolved "https://registry.yarnpkg.com/wrench/-/wrench-1.4.4.tgz#7f523efdb71b0100e77dce834c06523cbe3d54e0" + +write-file-atomic@^1.1.2: + version "1.3.1" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-1.3.1.tgz#7d45ba32316328dd1ec7d90f60ebc0d845bb759a" + dependencies: + graceful-fs "^4.1.11" + imurmurhash "^0.1.4" + slide "^1.1.5" + +write-file-stdout@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/write-file-stdout/-/write-file-stdout-0.0.2.tgz#c252d7c7c5b1b402897630e3453c7bfe690d9ca1" + 
+write-json-file@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/write-json-file/-/write-json-file-2.0.0.tgz#0eaec981fcf9288dbc2806cbd26e06ab9bdca4ed" + dependencies: + graceful-fs "^4.1.2" + mkdirp "^0.5.1" + pify "^2.0.0" + sort-keys "^1.1.1" + write-file-atomic "^1.1.2" + +write-pkg@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/write-pkg/-/write-pkg-2.0.0.tgz#93b922ee9a429f9bd74cdc69e549733c9e468156" + dependencies: + write-json-file "^2.0.0" + +write@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/write/-/write-0.2.1.tgz#5fc03828e264cea3fe91455476f7a3c566cb0757" + dependencies: + mkdirp "^0.5.1" + +xdg-basedir@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-1.0.1.tgz#14ff8f63a4fdbcb05d5b6eea22b36f3033b9f04e" + dependencies: + user-home "^1.0.0" + +xdg-basedir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-2.0.0.tgz#edbc903cc385fc04523d966a335504b5504d1bd2" + dependencies: + os-homedir "^1.0.0" + +xo-init@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/xo-init/-/xo-init-0.4.0.tgz#e92562e38117eb71e55b8d34ee2d006252a49d6a" + dependencies: + arrify "^1.0.0" + execa "^0.5.0" + minimist "^1.1.3" + path-exists "^3.0.0" + read-pkg-up "^2.0.0" + the-argv "^1.0.0" + write-pkg "^2.0.0" + +xo@^0.17.1: + version "0.17.1" + resolved "https://registry.yarnpkg.com/xo/-/xo-0.17.1.tgz#de65bc8120474fa76104f8a80b3b792d88c50ef6" + dependencies: + arrify "^1.0.0" + debug "^2.2.0" + deep-assign "^1.0.0" + eslint "^3.6.0" + eslint-config-xo "^0.17.0" + eslint-formatter-pretty "^1.0.0" + eslint-plugin-ava "^3.1.0" + eslint-plugin-import "^2.0.0" + eslint-plugin-no-use-extend-native "^0.3.2" + eslint-plugin-promise "^3.0.0" + eslint-plugin-unicorn "^1.0.0" + get-stdin "^5.0.0" + globby "^6.0.0" + has-flag "^2.0.0" + meow "^3.4.2" + multimatch "^2.1.0" + parse-gitignore "^0.3.1" + path-exists "^3.0.0" + pkg-conf "^2.0.0" + resolve-cwd "^1.0.0" + resolve-from "^2.0.0" + update-notifier "^1.0.0" + xo-init "^0.4.0" + +"xtend@>=4.0.0 <4.1.0-0", xtend@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" + +y18n@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41" + +yallist@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.0.0.tgz#306c543835f09ee1a4cb23b7bce9ab341c91cdd4" + +yargs-parser@^2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-2.4.1.tgz#85568de3cf150ff49fa51825f03a8c880ddcc5c4" + dependencies: + camelcase "^3.0.0" + lodash.assign "^4.0.6" + +yargs@^1.2.6: + version "1.3.3" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-1.3.3.tgz#054de8b61f22eefdb7207059eaef9d6b83fb931a" + +yargs@^3.5.4, yargs@~3.10.0: + version "3.10.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" + dependencies: + camelcase "^1.0.2" + cliui "^2.1.0" + decamelize "^1.0.0" + window-size "0.1.0" + +yargs@^4.3.2: + version "4.8.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-4.8.1.tgz#c0c42924ca4aaa6b0e6da1739dfb216439f9ddc0" + dependencies: + cliui "^3.2.0" + decamelize "^1.1.1" + get-caller-file "^1.0.1" + lodash.assign "^4.0.3" + os-locale "^1.4.0" + read-pkg-up "^1.0.1" + require-directory "^2.1.1" + require-main-filename "^1.0.1" + set-blocking "^2.0.0" + 
string-width "^1.0.1" + which-module "^1.0.0" + window-size "^0.2.0" + y18n "^3.2.1" + yargs-parser "^2.4.1" + +yargs@~3.15.0: + version "3.15.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.15.0.tgz#3d9446ef21fb3791b3985690662e4b9683c7f181" + dependencies: + camelcase "^1.0.2" + cliui "^2.1.0" + decamelize "^1.0.0" + window-size "^0.1.1" From ff2804214b04437f47c6b9db323739f353e39e56 Mon Sep 17 00:00:00 2001 From: X O Date: Thu, 23 Feb 2017 21:27:47 +1030 Subject: [PATCH 032/344] force LF line endings (#2275) --- .gitattributes | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitattributes b/.gitattributes index 661add7183..117bf51f1b 100644 --- a/.gitattributes +++ b/.gitattributes @@ -3,6 +3,7 @@ # Handle line endings automatically for files detected as text # and leave all files detected as binary untouched. * text=auto +* eol=lf # # The above will handle all files NOT found below From a9e0d0ba2b00206c0a353aabe8f1919126c443fd Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 23 Feb 2017 09:06:56 -0300 Subject: [PATCH 033/344] Fix mako no detecting add|remove of root folders. Populate with JS (#2271) * Fix removing/adding root folder not updating the select. Populate using JS Mako loads one the code in the import, so it won't see that app.ROOT_DIRS changes * Rename var to better understanding + review --- medusa/__main__.py | 4 ++-- medusa/app.py | 2 +- medusa/server/api/v2/config.py | 12 ++++++------ medusa/server/web/home/handler.py | 6 +++--- static/js/home/index.js | 26 +++++++++++++++++++++++--- tests/apiv2/test_config.py | 3 ++- views/home.mako | 14 +------------- 7 files changed, 38 insertions(+), 29 deletions(-) diff --git a/medusa/__main__.py b/medusa/__main__.py index 475388a3cc..6e31a85dd6 100755 --- a/medusa/__main__.py +++ b/medusa/__main__.py @@ -911,7 +911,7 @@ def initialize(self, console_logging=True): app.RELEASES_IN_PP = [] app.GIT_REMOTE_BRANCHES = [] app.KODI_LIBRARY_CLEAN_PENDING = False - app.SHOWS_ROOT = check_setting_int(app.CFG, 'GUI', 'shows_root', -1) + app.SELECTED_ROOT = check_setting_int(app.CFG, 'GUI', 'selected_root', -1) # reconfigure the logger app_logger.reconfigure() @@ -1467,7 +1467,7 @@ def save_config(): new_config['General']['display_all_seasons'] = int(app.DISPLAY_ALL_SEASONS) new_config['General']['news_last_read'] = app.NEWS_LAST_READ new_config['General']['broken_providers'] = helpers.get_broken_providers() or app.BROKEN_PROVIDERS - new_config['General']['shows_root'] = int(app.SHOWS_ROOT) + new_config['General']['selected_root'] = int(app.SELECTED_ROOT) new_config['Blackhole'] = {} new_config['Blackhole']['nzb_dir'] = app.NZB_DIR diff --git a/medusa/app.py b/medusa/app.py index 33fc5d1b0b..63e62d340a 100644 --- a/medusa/app.py +++ b/medusa/app.py @@ -511,7 +511,7 @@ POSTER_SORTDIR = None FANART_BACKGROUND = None FANART_BACKGROUND_OPACITY = None -SHOWS_ROOT = None +SELECTED_ROOT = None USE_SUBTITLES = False SUBTITLES_LANGUAGES = [] diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index fbd5e4d869..a58853c4a3 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -129,7 +129,8 @@ def get(self, query=''): 'allSeasons': bool(app.DISPLAY_ALL_SEASONS), 'specials': bool(app.DISPLAY_SHOW_SPECIALS) } - } + }, + 'selectedRootIndex': int(app.SELECTED_ROOT) if app.SELECTED_ROOT else None } if query and query not in config_data: @@ -240,11 +241,10 @@ def patch(self, *args, **kwargs): # if 'host' in data['torrents']: # if 'rpcurl' in data['torrents']: # if 'authType' in 
data['torrents']: - if key == 'showsRoot': - root_id = int(data['showsRoot']['id']) - app.SHOWS_ROOT = root_id - done_data.setdefault('showsRoot', {}) - done_data['showsRoot'].setdefault('id', root_id) + if key == 'selectedRootIndex': + root_id = int(data['selectedRootIndex']) + app.SELECTED_ROOT = root_id + done_data['selectedRootIndex'] = root_id if key == 'layout': done_data.setdefault('layout', {}) if 'schedule' in data['layout']: diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index a99ea352bf..2d2fc37336 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -58,11 +58,11 @@ def _genericMessage(self, subject, message): def index(self): t = PageTemplate(rh=self, filename='home.mako') - shows_root = int(app.SHOWS_ROOT) - if shows_root is not None and app.ROOT_DIRS: + selected_root = int(app.SELECTED_ROOT) + if selected_root is not None and app.ROOT_DIRS: backend_pieces = app.ROOT_DIRS.split('|') backend_dirs = backend_pieces[1:] - shows_dir = backend_dirs[shows_root] if shows_root != -1 else None + shows_dir = backend_dirs[selected_root] if selected_root != -1 else None shows = [] if app.ANIME_SPLIT_HOME: diff --git a/static/js/home/index.js b/static/js/home/index.js index 99762a9563..d7100691e7 100644 --- a/static/js/home/index.js +++ b/static/js/home/index.js @@ -337,9 +337,7 @@ MEDUSA.home.index = function() { $('#showRootDir').on('change', function() { api.patch('config', { - showsRoot: { - id: $(this).val() - } + selectedRootIndex: $(this).val() }).then(function(response) { log.info(response); window.location.reload(); @@ -347,4 +345,26 @@ MEDUSA.home.index = function() { log.info(err); }); }); + + var rootDir = MEDUSA.config.rootDirs; + var rootDirIndex = MEDUSA.config.selectedRootIndex; + if (rootDir) { + backendPieces = rootDir.split('|'); + backendDirs = backendPieces.slice(1); + } + if (backendDirs.length >= 2) { + $('#showRoot').show(); + var item = ['All Folders']; + rootDirOptions = item.concat(backendDirs); + $.each(rootDirOptions, function (i, item) { + $('#showRootDir').append($('
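The root-folder patch above moves population of the show-root dropdown out of the Mako template and into client-side JavaScript, and persists the chosen folder through the v2 config endpoint as selectedRootIndex. A minimal sketch of that round trip follows; it assumes the /api/v2/config route and the query-parameter x-api-key auth described by the API description added later in this series, and the key value and index used here are placeholders, not values taken from this patch.

    // Persist the selected root folder index (-1 selects "All Folders"),
    // then re-read the config to confirm what was stored.
    var apiKey = 'YOUR_API_KEY'; // placeholder only, substitute a real key
    var url = '/api/v2/config?x-api-key=' + apiKey;

    fetch(url, {
        method: 'PATCH',
        headers: {'Content-Type': 'application/json'},
        body: JSON.stringify({selectedRootIndex: 1})
    }).then(function(response) {
        return response.json();
    }).then(function() {
        return fetch(url);
    }).then(function(response) {
        return response.json();
    }).then(function(config) {
        // rootDirs is a single pipe-delimited string; the UI code in the
        // patch skips its first element and treats the rest as folder paths.
        console.log(config.selectedRootIndex, config.rootDirs);
    });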
    From 113ed962d27ae7b610cb504ea16e814b1ff4ad3d Mon Sep 17 00:00:00 2001 From: Fernando Date: Fri, 14 Apr 2017 13:37:56 -0300 Subject: [PATCH 335/344] Add missing indexer sorting for poster (#2593) * Add missing indexer sorting for poster * Fix indexer sorting for small poster and banner --- views/home.mako | 1 + views/partials/home/banner.mako | 2 +- views/partials/home/small.mako | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/views/home.mako b/views/home.mako index 498ccd0b11..fc5fbecbe9 100644 --- a/views/home.mako +++ b/views/home.mako @@ -33,6 +33,7 @@ +
    diff --git a/views/partials/home/banner.mako b/views/partials/home/banner.mako index 66288e0bf7..ad31cb1c2c 100644 --- a/views/partials/home/banner.mako +++ b/views/partials/home/banner.mako @@ -163,7 +163,7 @@ [trakt] % endif - + ${indexerApi(cur_show.indexer).name} diff --git a/views/partials/home/small.mako b/views/partials/home/small.mako index 4711398578..edffba6933 100644 --- a/views/partials/home/small.mako +++ b/views/partials/home/small.mako @@ -163,7 +163,7 @@ [trakt] % endif - + ${indexerApi(cur_show.indexer).name} From c5c96b74deff7e034d2ee4c4648a59b97d394899 Mon Sep 17 00:00:00 2001 From: Fernando Date: Fri, 14 Apr 2017 19:09:30 -0300 Subject: [PATCH 336/344] Don't allow user submit error with EpisodeNotFoundException (#2592) * Don't allow user submit error with EpisodeNotFoundException * Update series.py * fixup --- medusa/tv/series.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index c212632d11..7887bf6ed2 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -1048,8 +1048,9 @@ def make_ep_from_file(self, filepath): if not cur_ep: raise EpisodeNotFoundException except EpisodeNotFoundException: - logger.error(u'{indexerid}: Unable to figure out what this file is, skipping {filepath}', - indexerid=self.indexerid, filepath=filepath) + logger.warning(u"{indexerid}: Episode not found for this show: '{show}'. " + u'Please run a full update. Skipping file: {filepath}', + indexerid=self.indexerid, show=self.name, filepath=filepath) continue else: From cbeccbfcb2dccb09ec09e228c79c884ef69e7ff8 Mon Sep 17 00:00:00 2001 From: Rato Date: Mon, 17 Apr 2017 12:02:21 +0200 Subject: [PATCH 337/344] Refactoring api v2 (#2413) * Refactoring api v2 * Fixing flake issues and test issues * More refactoring * More api v2 refactoring + ui fixes * Fixing tests * More refactoring * Scene exceptions api v2 refactoring * Fixing js issue * Adding Link and Location headers. Several minor fixes * Removing snake_case in query parameter * Fixing api key + token check * Minor fix * Removing snake_case in query parameter * Minor fixes in log endpoint * Minor fix in alias-source endpoint * Minor fix * Enhancing episode json schema * Updating dredd configuration * Fixing failing test * Add dredd_hooks to dependancies * Move test-api to after tox ... 
so `dredd_hooks` will already be installed * import dredd_hooks in travis * fix dredd * fix travis dredd * Fix wrong log and wrong attribute * Handling HTTP 4xx codes * Fixing failing travis * Improving sort query parameter to accept multiple properties * Refactoring identifiers * Fixing dredd test --- .travis.yml | 19 +- api-description.yml | 342 ---- dredd/.gitignore | 1 + dredd/api-description.yml | 1801 ++++++++++++++++++ dredd.yml => dredd/dredd.yml | 13 +- dredd/dredd_hook.py | 173 ++ dredd_hook.py | 34 - medusa/db.py | 8 +- medusa/helper/collections.py | 11 + medusa/image_cache.py | 13 +- medusa/indexers/indexer_config.py | 14 +- medusa/logger/__init__.py | 14 +- medusa/server/api/v2/alias.py | 173 ++ medusa/server/api/v2/alias_source.py | 92 + medusa/server/api/v2/asset.py | 36 - medusa/server/api/v2/auth.py | 77 +- medusa/server/api/v2/base.py | 386 +++- medusa/server/api/v2/config.py | 465 ++--- medusa/server/api/v2/episode.py | 63 + medusa/server/api/v2/log.py | 67 +- medusa/server/api/v2/scene_exception.py | 221 --- medusa/server/api/v2/series.py | 129 ++ medusa/server/api/v2/series_asset.py | 36 + medusa/server/api/v2/series_legacy.py | 48 + medusa/server/api/v2/series_operation.py | 46 + medusa/server/api/v2/show.py | 207 -- medusa/server/api/v2/status.py | 16 - medusa/server/core.py | 77 +- medusa/show/coming_episodes.py | 2 + medusa/tv/__init__.py | 1 + medusa/tv/base.py | 19 +- medusa/tv/episode.py | 303 ++- medusa/tv/indexer.py | 54 + medusa/tv/series.py | 433 +++-- package.json | 6 +- static/js/common/init.js | 3 +- static/js/config/index.js | 2 +- static/js/core.js | 5 +- static/js/history/index.js | 2 +- static/js/home/display-show.js | 2 +- static/js/home/edit-show.js | 3 +- static/js/home/index.js | 4 +- static/js/manage/backlog-overview.js | 4 +- static/js/manage/manage-searches.js | 14 +- static/js/plot-tooltip.js | 4 +- static/js/quality-chooser.js | 18 +- static/js/schedule/index.js | 2 +- tests/apiv2/conftest.py | 3 +- tests/apiv2/test_config.py | 234 +-- tests/apiv2/test_log.py | 2 +- tests/apiv2/{test_show.py => test_series.py} | 4 +- tests/test_server_base.py | 133 ++ tests/test_tv_identifiers.py | 186 ++ tox.ini | 3 +- views/displayShow.mako | 1 + views/editShow.mako | 2 +- views/history.mako | 3 +- views/home.mako | 6 +- views/partials/home/banner.mako | 4 +- views/partials/home/poster.mako | 4 +- views/partials/home/small.mako | 4 +- views/partials/showheader.mako | 7 +- views/schedule.mako | 6 +- views/snatchSelection.mako | 1 + views/status.mako | 6 +- views/testRename.mako | 1 + views/viewlogs.mako | 6 +- yarn.lock | 691 ++++++- 68 files changed, 5067 insertions(+), 1703 deletions(-) delete mode 100644 api-description.yml create mode 100644 dredd/.gitignore create mode 100644 dredd/api-description.yml rename dredd.yml => dredd/dredd.yml (76%) create mode 100644 dredd/dredd_hook.py delete mode 100644 dredd_hook.py create mode 100644 medusa/helper/collections.py create mode 100644 medusa/server/api/v2/alias.py create mode 100644 medusa/server/api/v2/alias_source.py delete mode 100644 medusa/server/api/v2/asset.py create mode 100644 medusa/server/api/v2/episode.py delete mode 100644 medusa/server/api/v2/scene_exception.py create mode 100644 medusa/server/api/v2/series.py create mode 100644 medusa/server/api/v2/series_asset.py create mode 100644 medusa/server/api/v2/series_legacy.py create mode 100644 medusa/server/api/v2/series_operation.py delete mode 100644 medusa/server/api/v2/show.py delete mode 100644 medusa/server/api/v2/status.py create mode 100644 
medusa/tv/indexer.py rename tests/apiv2/{test_show.py => test_series.py} (81%) create mode 100644 tests/test_server_base.py create mode 100644 tests/test_tv_identifiers.py diff --git a/.travis.yml b/.travis.yml index 6a1d968652..4c151629fb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,24 +4,29 @@ python: sudo: false branches: except: - - master + - master install: - pip install --upgrade pip - pip install --upgrade tox + - pip install dredd_hooks + - pip install PyYAML + - pip install six - nvm install 6.9.1 - nvm use 6.9.1 - - curl -o- -L https://yarnpkg.com/install.sh | bash - - export PATH="$HOME/.yarn/bin:$PATH" && yarn install + - 'curl -o- -L https://yarnpkg.com/install.sh | bash' + - 'export PATH="$HOME/.yarn/bin:$PATH" && yarn install' script: - yarn test-js - tox -v --recreate + - yarn test-api cache: yarn: true directories: - - $HOME/.cache/pip - - node_modules + - $HOME/.cache/pip + - node_modules after_failure: - - cat ./Logs/application.log + - cat ./dredd/data/Logs/application.log notifications: slack: - secure: YYOoxBgy4+iAIWylJX0ndT+KwctRzSL/8mUOPErIJOaGAwU6w9JT2WyO1uP/xq9xm+MjgGWqKJ7bpwGRJ12bCkP4mPcnn5A7c/UqFcCgwVgWfKdI/5EeHB6RfzK76J01amenN69/hzt5WjifE4wCONfJAcftKxylh69kWV5QipDcZZA//yQiO9BfYlsW3CxkHb3OGPHSJVYb32IdfCn4hnt3WaWkentXBj5R0v0kyNKbkFqQ5OGPlsjrYpxDMF8vgduxmg8zGw2tXjWGIC2bWuPoAurZy0ewyoKIna82wgkNySFjNBXoZCtssW7yPFJjUQHP/lHIZa4gLmA5Gdli7WoaN/lYaoGXlxAXSskfvgGXin92CRHukSbzEdzQznPhzxzIcuJA2je0gAvmIn2nw0itUGKbVQBZTV3nRFJb3iEHaodC0+1zozGQASxfXQzzBuHU6ZUAFWzlMNQ80RjuiS5951mmgIBo8fOfTWkVFTX8ayEfGbYhqoJqJ5QMjTjoEt8SYKrlHdlDBh803LmKOsID9B8dDn0onXlYNZAioqTTFb/xqL95aCDr84PKYbNSfraqPU6hsSc8ITtxeMS454k8BGxzed0s8bKsCDQP7HXmYKbShByMYX8NipuhtEDXeCGyCLX3atoO0qFiZ0/sUXXf67w/14eLRBAdKfnr02I= + secure: >- + YYOoxBgy4+iAIWylJX0ndT+KwctRzSL/8mUOPErIJOaGAwU6w9JT2WyO1uP/xq9xm+MjgGWqKJ7bpwGRJ12bCkP4mPcnn5A7c/UqFcCgwVgWfKdI/5EeHB6RfzK76J01amenN69/hzt5WjifE4wCONfJAcftKxylh69kWV5QipDcZZA//yQiO9BfYlsW3CxkHb3OGPHSJVYb32IdfCn4hnt3WaWkentXBj5R0v0kyNKbkFqQ5OGPlsjrYpxDMF8vgduxmg8zGw2tXjWGIC2bWuPoAurZy0ewyoKIna82wgkNySFjNBXoZCtssW7yPFJjUQHP/lHIZa4gLmA5Gdli7WoaN/lYaoGXlxAXSskfvgGXin92CRHukSbzEdzQznPhzxzIcuJA2je0gAvmIn2nw0itUGKbVQBZTV3nRFJb3iEHaodC0+1zozGQASxfXQzzBuHU6ZUAFWzlMNQ80RjuiS5951mmgIBo8fOfTWkVFTX8ayEfGbYhqoJqJ5QMjTjoEt8SYKrlHdlDBh803LmKOsID9B8dDn0onXlYNZAioqTTFb/xqL95aCDr84PKYbNSfraqPU6hsSc8ITtxeMS454k8BGxzed0s8bKsCDQP7HXmYKbShByMYX8NipuhtEDXeCGyCLX3atoO0qFiZ0/sUXXf67w/14eLRBAdKfnr02I= diff --git a/api-description.yml b/api-description.yml deleted file mode 100644 index 4f70853bf2..0000000000 --- a/api-description.yml +++ /dev/null @@ -1,342 +0,0 @@ -swagger: '2.0' -info: - title: Medusa API - description: DESCRIPTION - version: "1.0.0" -host: localhost:8081 -schemes: - - http - - https -securityDefinitions: - x-api-key: - type: apiKey - name: x-api-key - in: query - Bearer: - type: apiKey - name: Authorization - in: header -basePath: /api/v2 -produces: - - application/json -security: [ { x-api-key: [] }, { Bearer: [] } ] -paths: - /authenticate: - post: - security: [] - summary: "Returns a JWT for the provided user. This is required for all other routes." - description: | - DESCRIPTION. - responses: - 200: - description: "The JWT for the user." - schema: - type: string - 401: - description: "No credentials provided or invalid credentials." - schema: - type: object - parameters: - - name: auth - in: body - description: auth object - required: true - schema: - $ref: '#/definitions/Auth' - /show: - get: - summary: "Returns all shows in Medusa that the user has access to." 
- description: | - The Show endpoint returns information about the Shows added to Medusa. - responses: - 200: - description: "An array of shows" - schema: - type: array - items: - $ref: '#/definitions/Show' - default: - description: "Unexpected error" - schema: - $ref: '#/definitions/Error' - /config: - get: - summary: "Returns all config values for Medusa." - description: | - DESCRIPTION - responses: - 200: - description: "An object containing all the config values." - schema: - type: object - properties: - wikiUrl: - type: string - themeName: - type: string - namingForceFolders: - type: boolean - databaseVersion: - type: object - properties: - major: - type: integer - minor: - type: integer - layout: - type: object - properties: - show: - type: object - properties: - specials: - type: boolean - allSeasons: - type: boolean - home: - type: string - history: - type: string - schedule: - type: string - trimZero: - type: boolean - configFile: - type: string - animeSplitHome: - type: boolean - sortArticle: - type: boolean - sourceUrl: - type: string - fanartBackgroundOpacity: - type: integer - emby: - type: object - properties: - enabled: - type: boolean - logDir: - type: string - posterSortby: - type: string - subtitles: - type: object - properties: - enabled: - type: boolean - fuzzyDating: - type: boolean - timePreset: - type: string - kodi: - type: object - properties: - enabled: - type: boolean - dbFilename: - type: string - pythonVersion: - type: string - downloadUrl: - type: string - nzb: - type: object - properties: - username: - type: string - host: - type: string - password: - type: string - enabled: - type: boolean - priority: - type: integer - release: - type: string - posterSortDir: - type: string - locale: - type: string - webRoot: - type: string - torrents: - type: object - properties: - username: - type: string - seedTime: - type: integer - rpcurl: - type: string - authType: - type: string - paused: - type: boolean - host: - type: string - path: - type: string - password: - type: string - verifySSL: - type: boolean - highBandwidth: - type: boolean - enabled: - type: boolean - label: - type: string - labelAnime: - type: string - method: - type: string - enum: ["blackhole", "utorrent", "transmission", "deluge", "deluged", "download_station", "rtorrent", "qbittorrent", "mlnet"] -definitions: - Show: - type: object - properties: - id: - type: object - description: | - All of the ids mapped to a certain show, atleast one of these is required. - properties: - tvdb: - type: string - description: "This is the ID from thetvdb.com" - tvmaze: - type: string - description: "This is the ID from tvmaze.com" - imdb: - type: string - description: "This is the ID from imdb.com" - title: - type: string - description: | - This is the title of the show, this should be in the main - language the show was originally created in. If other - titles exist for other languages they will be part of the "akas" field. 
- indexer: - type: string - network: - type: string - type: - type: string - status: - type: string - airs: - type: string - language: - type: string - showType: - type: string - akas: - type: object - year: - type: object - airDates: - type: object - items: - type: string - runtime: - type: integer - genres: - type: array - items: - type: string - rating: - type: object - properties: - imdb: - type: object - properties: - stars: - type: string - votes: - type: integer - classification: - type: string - cache: - type: object - countries: - type: array - items: - type: string - config: - type: object - properties: - location: - type: string - qualities: - type: object - properties: - allowed: - type: array - items: - type: string - prefered: - type: array - items: - type: string - paused: - type: boolean - airByDate: - type: boolean - subtitlesEnabled: - type: boolean - dvdOrder: - type: boolean - flattenFolders: - type: boolean - scene: - type: boolean - defaultEpisodeStatus: - type: string - aliases: - type: array - items: - type: string - release: - type: object - properties: - blacklist: - type: array - items: - type: string - whitelist: - type: array - items: - type: string - ignoredWords: - type: array - items: - type: string - requiredWords: - type: array - items: - type: string - Auth: - description: Auth object - properties: - username: - type: string - password: - type: string - exp: - type: integer - minimum: 3600 - maximum: 31536000 - required: - - username - - password - Error: - type: object - properties: - code: - type: integer - format: int32 - message: - type: string - fields: - type: string diff --git a/dredd/.gitignore b/dredd/.gitignore new file mode 100644 index 0000000000..1269488f7f --- /dev/null +++ b/dredd/.gitignore @@ -0,0 +1 @@ +data diff --git a/dredd/api-description.yml b/dredd/api-description.yml new file mode 100644 index 0000000000..c51d12359e --- /dev/null +++ b/dredd/api-description.yml @@ -0,0 +1,1801 @@ +swagger: '2.0' +info: + title: Medusa API + description: DESCRIPTION + version: "1.0.0" +host: localhost:8081 +schemes: + - http + - https +securityDefinitions: + x-api-key: + type: apiKey + name: x-api-key + in: query + Bearer: + type: apiKey + name: Authorization + in: header +basePath: /api/v2 +consumes: + - "application/json" +produces: + - "application/json; charset=UTF-8" + - "text/plain; charset=UTF-8" + - "image/jpeg" +security: [ { x-api-key: [] }, { Bearer: [] } ] +paths: + /series: + post: + summary: Add series + description: | + Given an indexer and its id, adds the series to Medusa. + parameters: + - name: series + in: body + required: true + description: Only id (with indexer information) should be specified + schema: + $ref: '#/definitions/Series' + example: + id: + tvdb: 301824 + responses: + 201: + description: Series added + headers: + Location: + type: string + description: The location of the newly added series + schema: + $ref: '#/definitions/Series' + 400: + $ref: '#/responses/error' + description: Invalid request + x-request: + body: {} + 404: + $ref: '#/responses/error' + description: Series not found in the indexer + x-request: + body: + id: + tvdb: 99999999 + 409: + $ref: '#/responses/error' + description: Series already added + get: + summary: Return series that the user has access to + description: | + The Series endpoint returns information about the Series added to Medusa. 
+ parameters: + - name: paused + in: query + required: false + description: Filter series based on paused status + type: boolean + - $ref: '#/parameters/detailed' + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination' + description: A paged array of series + schema: + type: array + items: + $ref: '#/definitions/Series' + 400: + $ref: '#/responses/error' + description: Invalid pagination parameters + x-request: + query-params: + sort: abc + /series/{id}: + get: + summary: Return information about a specific series + description: Retrieves information about a specific series + parameters: + - $ref: '#/parameters/series-id' + name: id + responses: + 200: + description: The series information + schema: + $ref: '#/definitions/Series' + 400: + $ref: '#/responses/error' + description: Invalid id + x-request: + path-params: + id: 123456 + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + path-params: + id: tvdb999999999 + patch: + summary: Partial update series + description: Partial update series + parameters: + - $ref: '#/parameters/series-id' + name: id + - name: series + in: body + required: false + description: Currently, only pause field is supported + schema: + $ref: '#/definitions/Series' + responses: + 200: + description: Response with only the updated fields + schema: + $ref: '#/definitions/Series' + 400: + $ref: '#/responses/error' + description: Invalid id + x-request: + body: + id: + tvdb: 80379 + path-params: + id: tvdb301824 + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + body: + id: + tvdb: 999999999 + path-params: + id: tvdb999999999 + delete: + summary: Delete a series + description: Delete a series + parameters: + - $ref: '#/parameters/series-id' + name: id + - name: remove-files + in: query + required: false + description: Whether files from the series should be removed + type: boolean + responses: + 204: + description: Series is deleted successfully + 400: + $ref: '#/responses/error' + description: Invalid id + x-request: + path-params: + id: 123456 + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + path-params: + id: tvdb999999999 + 409: + $ref: '#/responses/error' + description: Unable to delete series + /series/{id}/{field}: + get: + summary: Return a specific field from a given series + description: Retrieves a specific field from a given series + parameters: + - $ref: '#/parameters/series-id' + name: id + - name: field + in: path + required: true + description: Any series field + x-example: network + type: string + responses: + 200: + description: The series information + schema: {} + 400: + $ref: '#/responses/error' + description: Invalid field + x-request: + path-params: + field: abc + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + path-params: + id: tvdb999999999 + /series/{seriesid}/episode: + get: + summary: Return episodes from a given series + description: | + The Episode endpoint returns information about the Episodes from a given Series. 
+ parameters: + - $ref: '#/parameters/series-id' + name: seriesid + - name: season + in: query + required: false + description: The episode season + type: integer + format: int32 + - $ref: '#/parameters/detailed' + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination' + description: A paged array of episodes + schema: + type: array + items: + $ref: '#/definitions/Episode' + 400: + $ref: '#/responses/error' + description: Invalid series id or pagination parameters + x-request: + query-params: + sort: abc + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + path-params: + seriesid: tvdb999999999 + /series/{seriesid}/episode/{id}: + get: + summary: Return a specific episode from a given series + description: Retrieve a specific episode from a given series + parameters: + - $ref: '#/parameters/series-id' + name: seriesid + - $ref: '#/parameters/episode-id' + name: id + - $ref: '#/parameters/detailed' + responses: + 200: + description: The episode information + schema: + $ref: '#/definitions/Episode' + 400: + $ref: '#/responses/error' + description: Invalid series or episode id + x-request: + path-params: + id: abc + 404: + $ref: '#/responses/error' + description: Series or episode not found + x-request: + path-params: + id: e999 + /series/{seriesid}/episode/{id}/{field}: + get: + summary: Return a specific field from a given episode + description: Retrieve a specific field from a given episode + parameters: + - $ref: '#/parameters/series-id' + name: seriesid + - $ref: '#/parameters/episode-id' + name: id + - name: field + in: path + required: true + description: The episode field + x-example: airDate + type: string + responses: + 200: + description: The episode field value + schema: {} + 400: + $ref: '#/responses/error' + description: Invalid series or episode id or invalid field + x-request: + path-params: + field: abc + 404: + $ref: '#/responses/error' + description: Series or episode not found + x-request: + path-params: + id: "2050-12-31" + /series/{seriesid}/episode/{id}/metadata: + get: + summary: Return the video metadata from a specific episode + description: Retrieve the video metadata from a specific episode + parameters: + - $ref: '#/parameters/series-id' + name: seriesid + - $ref: '#/parameters/episode-id' + name: id + responses: + 200: + description: The video metadata + schema: + $ref: '#/definitions/Metadata' + 400: + $ref: '#/responses/error' + description: Invalid series or episode id + x-request: + path-params: + id: s01 + 404: + $ref: '#/responses/error' + description: Series or episode not found + x-request: + path-params: + id: s99e99 + /series/{seriesid}/asset/{id}: + get: + summary: Return a specific asset from a given series + description: Retrieves a specific asset from a given series + produces: + - "image/jpeg" + parameters: + - $ref: '#/parameters/series-id' + name: seriesid + - $ref: '#/parameters/asset-id' + name: id + responses: + 200: + description: The asset stream + x-expect: + no-body: true + 400: + $ref: '#/responses/error' + description: Invalid series id + x-request: + path-params: + seriesid: abc + 404: + $ref: '#/responses/error' + description: Series or asset not found + x-request: + path-params: + id: abc + /series/{id}/operation: + post: + summary: Create an operation that relates to a specific series + description: > + Create an operation that relates to a specific series. 
+ Currently only type='ARCHIVE_EPISODES' is supported + parameters: + - $ref: '#/parameters/series-id' + name: id + - name: operation + in: body + required: true + schema: + $ref: '#/definitions/Operation' + example: + type: ARCHIVE_EPISODES + responses: + 201: + description: "When type='ARCHIVE_EPISODES': episodes were archived" + x-disabled: true + 204: + description: "When type='ARCHIVE_EPISODES': no episode was archived" + 400: + $ref: '#/responses/error' + description: Invalid id or invalid operation type + x-request: + body: + type: SUPER_OPERATION + 404: + $ref: '#/responses/error' + description: Series not found + x-request: + path-params: + id: tvmaze999999999 + /alias: + get: + summary: Return existing aliases + description: Retrieve existing aliases + parameters: + - name: series + in: query + required: false + description: "Series identifier. E.g.: tvdb1234" + type: string + - name: season + in: query + required: false + description: The season number + type: integer + format: int32 + - name: type + in: query + required: false + description: Alias type + type: string + enum: [local] + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination' + description: A paged array of aliases + schema: + type: array + items: + $ref: '#/definitions/Alias' + 400: + $ref: '#/responses/error' + description: Invalid series or pagination parameters + x-request: + query-params: + limit: 0 + post: + summary: Create a new alias + description: Create a new alias + parameters: + - name: alias + in: body + required: true + schema: + $ref: '#/definitions/Alias' + responses: + 201: + description: Alias created + headers: + Location: + type: string + description: The location of the newly created alias + schema: + $ref: '#/definitions/Alias' + x-stash: + alias-id: "${body['id']}" + 400: + $ref: '#/responses/error' + description: Invalid request + x-request: + body: + type: local + 409: + $ref: '#/responses/error' + description: Unable to create alias + x-disabled: true + /alias/{id}/{field}: + get: + summary: Return a specific field from a given alias + description: Retrieve a specific field from a a given alias + parameters: + - $ref: '#/parameters/alias-id' + name: id + - name: field + in: path + required: true + description: Any alias field + x-example: name + type: string + responses: + 200: + description: The alias information + schema: {} + x-request: + path-params: + id: "${stash['alias-id']}" + 400: + $ref: '#/responses/error' + description: Invalid field + x-request: + path-params: + id: "${stash['alias-id']}" + field: abc + 404: + $ref: '#/responses/error' + description: Alias not found + x-request: + path-params: + id: 999999999 + /alias/{id}: + get: + summary: Return information about a given alias + description: Retrieves information about a given alias + parameters: + - $ref: '#/parameters/alias-id' + name: id + responses: + 200: + description: The alias information + schema: + $ref: '#/definitions/Alias' + x-request: + path-params: + id: "${stash['alias-id']}" + 404: + $ref: '#/responses/error' + description: Alias not found + x-request: + path-params: + id: 999999999 + put: + summary: Replace alias data + description: Replace alias data + parameters: + - $ref: '#/parameters/alias-id' + name: id + x-example: 1 + - name: alias + in: body + required: true + schema: + $ref: '#/definitions/Alias' + example: + $ref: '#/definitions/Alias/example' + id: 1 + responses: + 204: + description: Alias data replaced + 
x-request: + path-params: + id: "${stash['alias-id']}" + body: + id: "${stash['alias-id']}" + series: tvdb301824 + name: TheBig + type: local + 400: + $ref: '#/responses/error' + description: Invalid request + x-request: + body: + id: 1 + 404: + $ref: '#/responses/error' + description: Alias not found + x-request: + body: + id: 999999999 + series: tvdb301824 + name: TheBig + type: local + path-params: + id: 999999999 + delete: + summary: Delete an alias + description: Delete an alias + parameters: + - $ref: '#/parameters/alias-id' + name: id + x-example: 123456 + responses: + 204: + description: Alias deleted + x-request: + path-params: + id: "${stash['alias-id']}" + 404: + $ref: '#/responses/error' + description: Alias not found + x-request: + path-params: + id: 999999999 + /alias-source: + get: + summary: Return existing sources for aliases + description: Retrieve existing sources for aliases + parameters: + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination' + description: A paged array of alias-source + schema: + type: array + items: + $ref: '#/definitions/AliasSource' + 400: + $ref: '#/responses/error' + description: Invalid pagination parameters + x-request: + query-params: + page: abc + /alias-source/{id}: + get: + summary: Return a specific source for aliases + description: Retrieves a specific source for aliases + parameters: + - $ref: '#/parameters/alias-source-id' + name: id + responses: + 200: + description: The alias source information + schema: + $ref: '#/definitions/AliasSource' + 404: + $ref: '#/responses/error' + description: Alias source not found + x-request: + path-params: + id: abc + /alias-source/{id}/{field}: + get: + summary: Return a specific field from a given source for aliases + description: Retrieve a specific field from a given source for aliases + parameters: + - $ref: '#/parameters/alias-source-id' + name: id + - name: field + in: path + required: true + x-example: lastRefresh + type: string + responses: + 200: + description: The alias source field value + schema: {} + 400: + $ref: '#/responses/error' + description: Invalid field + x-request: + path-params: + field: abc + 404: + $ref: '#/responses/error' + description: Alias source not found + x-request: + path-params: + id: abc + /alias-source/{id}/operation: + post: + summary: Create an operation that relates to a given alias source + description: > + Create an operation that relates to a given alias source. 
+ Currently only type='REFRESH' is supported + parameters: + - $ref: '#/parameters/alias-source-with-all-id' + name: id + - name: operation + in: body + required: true + schema: + $ref: '#/definitions/Operation' + example: + type: REFRESH + responses: + 201: + description: "When type='REFRESH': aliases for that source were refreshed" + schema: + $ref: '#/definitions/Operation' + 400: + $ref: '#/responses/error' + description: Invalid id or invalid operation type + x-request: + body: + type: SUPER_OPERATION + 404: + $ref: '#/responses/error' + description: Alias source not found + x-request: + path-params: + id: abc + /config: + get: + summary: Return configurations for Medusa + description: Retrieve configurations + parameters: + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination' + description: A paged array of config objects + schema: + type: array + items: + $ref: '#/definitions/Config' + 400: + $ref: '#/responses/error' + description: Invalid pagination parameters + x-request: + query-params: + page: 0 + /config/{id}: + get: + summary: Return information about a given configuration + description: Retrieve information about a given configuration + parameters: + - $ref: '#/parameters/config-id' + name: id + responses: + 200: + description: The configuration information + schema: + $ref: '#/definitions/Config' + 404: + $ref: '#/responses/error' + description: Configuration not found + x-request: + path-params: + id: super + /config/{id}/{field}: + get: + summary: Return a specific field from a given configuration + description: Retrieve a specific field from a given configuration + parameters: + - $ref: '#/parameters/config-id' + name: id + - name: field + in: path + required: true + description: Any configuration field + x-example: themeName + type: string + responses: + 200: + description: The configuration information + schema: {} + 400: + $ref: '#/responses/error' + description: Invalid field + x-request: + path-params: + field: abc + 404: + $ref: '#/responses/error' + description: Configuration not found + x-request: + path-params: + id: abc + /log: + get: + summary: Return log messages from the application. + description: Retrieve log messages. Default sorting is descending by timestamp + parameters: + - $ref: '#/parameters/log-level' + - $ref: '#/parameters/page' + - $ref: '#/parameters/limit' + - $ref: '#/parameters/sort' + responses: + 200: + $ref: '#/responses/pagination_stream' + description: A paged array of log messages + schema: + type: array + items: + $ref: '#/definitions/Log' + examples: + 'application/json': + - $ref: '#/definitions/Log/example' + - $ref: '#/definitions/Log/example' + - $ref: '#/definitions/Log/example' + 400: + $ref: '#/responses/error' + description: Invalid log level or pagination parameter + x-request: + query-params: + log-level: abc + post: + summary: Log a message + description: Log a message + parameters: + - name: log + in: body + required: true + schema: + $ref: '#/definitions/Log' + responses: + 201: + description: Log message successfully created + x-expect: + no-body: true + 400: + $ref: '#/responses/error' + description: Invalid request + x-request: + body: + level: error + /authenticate: + post: + security: [] + summary: Return a JWT for the provided user. 
This is required for all other routes + description: DESCRIPTION + parameters: + - name: auth + in: body + description: auth object + required: true + schema: + $ref: '#/definitions/Auth' + responses: + 200: + description: The JWT for the user + schema: + type: string + x-no-api-key: true + x-request: + body: + username: "${stash['web-username']}" + password: "${stash['web-password']}" + 401: + $ref: '#/responses/error' + description: No credentials provided or invalid credentials + x-request: + body: {} +definitions: + Series: + type: object + properties: + id: + type: object + description: | + All of the ids mapped to a certain Series, at least one of these is required. + properties: + tvdb: + type: integer + format: int32 + minimum: 1 + description: This is the ID from thetvdb.com + tvmaze: + type: integer + format: int32 + minimum: 1 + description: This is the ID from tvmaze.com + imdb: + type: string + description: This is the ID from imdb.com + example: + tvdb: 1234 + tvmaze: 5678 + imdb: tt90123 + title: + type: string + description: | + This is the title of the series, this should be in the main + language the series was originally created in. If other + titles exist for other languages they will be part of the "akas" field. + indexer: + type: string + description: "Indexer name" + example: tvdb + network: + type: string + example: CBS + type: + type: string + example: Scripted + status: + type: string + enum: [Continuing, Ended] + airs: + type: string + description: "Air time" + example: "Thursday 8:00 PM" + language: + type: string + description: Language code + example: en + showType: + type: string + enum: [series, anime, sports] + akas: + type: object + description: Also known as + additionalProperties: + type: string + year: + type: object + properties: + start: + type: integer + format: int32 + minimum: 1900 + maximum: 2200 + description: Starting year + end: + type: integer + format: int32 + minimum: 1900 + maximum: 2200 + description: End year. 
Available in detailed view + nextAirDate: + type: string + format: date-time + description: Next episode air date + runtime: + type: integer + minimum: 1 + format: int32 + description: Episodes runtime in minutes + genres: + type: array + items: + type: string + example: [Drama, Romance] + rating: + type: object + properties: + imdb: + type: object + properties: + stars: + type: string + description: "IMDB's star rating from 0 to 10" + votes: + type: integer + format: int32 + minimum: 1 + description: "Total number of votes" + example: + imdb: + stars: '8.3' + votes: 558507 + classification: + type: string + enum: ['TV-Y', 'TV-Y7', 'TV-G', 'TV-PG', 'TV-14', 'TV-MA'] + description: TV Parental Guidelines + cache: + type: object + description: Image cache locations + properties: + banner: + type: string + example: "/home/user/Medusa/cache/images/301824.banner.jpg" + poster: + type: string + example: /home/user/Medusa/cache/images/301824.poster.jpg + countries: + type: array + items: + type: string + description: Country codes + config: + type: object + description: Series configuration and preferences + properties: + location: + type: string + description: Series home folder + example: "/library/My Series" + qualities: + type: object + description: Qualities settings + properties: + allowed: + type: array + items: + type: string + prefered: + type: array + items: + type: string + paused: + type: boolean + description: Whether series is paused + airByDate: + type: boolean + description: Whether episodes are indexed by air date + subtitlesEnabled: + type: boolean + description: Whether subtitles download is enabled + dvdOrder: + type: boolean + description: Whether episode numbering follows DVD order instead of air date + flattenFolders: + type: boolean + description: Whether episodes are stored in a flatten folder structure + scene: + type: boolean + defaultEpisodeStatus: + type: string + description: Initial status for newly added episodes + aliases: + type: array + description: Local aliases for the series + items: + type: string + example: [MySeries, MS] + release: + type: object + description: Release configuration + properties: + blacklist: + type: array + description: Blacklisted release words + items: + type: string + whitelist: + type: array + description: Whitelisted release words + items: + type: string + ignoredWords: + type: array + items: + type: string + requiredWords: + type: array + description: Required release words + items: + type: string + seasons: + type: array + description: Episodes grouped by season. Available in detailed view. + items: + type: array + description: Episodes for the given season + items: + $ref: '#/definitions/Episode' + episodeCount: + type: integer + format: int32 + minimum: 0 + maximum: 10000 + description: Total number of episodes. 
Available in detailed view + Episode: + type: object + properties: + identifier: + type: string + example: s03e07 + id: + type: object + properties: + tvdb: + type: integer + format: int32 + description: This is the ID from thetvdb.com + tvmaze: + type: integer + format: int32 + description: This is the ID from tvmaze.com + imdb: + type: string + description: This is the ID from imdb.com + example: + tvdb: 2345 + tvmaze: 6789 + imdb: tt0123 + season: + type: integer + format: int32 + minimum: 0 + maximum: 1000 + episode: + type: integer + format: int32 + minimum: 0 + maximum: 10000 + absoluteNumber: + type: integer + format: int32 + minimum: 1 + maximum: 10000 + airDate: + type: string + format: date-time + title: + type: string + description: Episode title + description: + type: string + description: Episode plot + content: + type: array + description: Additional content + items: + type: string + enum: [NFO, thumbnail] + subtitles: + type: array + items: + type: string + description: Available subtitle languages + status: + type: string + description: Episode status + release: + type: object + description: The release details + properties: + name: + type: string + description: Original release name + group: + type: string + description: Original release group + proper: + type: boolean + description: Whether the release is proper + version: + type: integer + format: int32 + minimum: 0 + maximum: 10 + description: Episode version (common in animes) + scene: + type: object + properties: + season: + type: integer + format: int32 + minimum: 0 + maximum: 1000 + episode: + type: integer + format: int32 + minimum: 0 + maximum: 10000 + absoluteNumber: + type: integer + format: int32 + minimum: 1 + maximum: 10000 + file: + type: object + description: The episode file details + properties: + location: + type: string + description: The episode file location + example: '/library/My Series/Season 10/My Series - S03E07 - Super Episode.avi' + size: + type: integer + format: int64 + minimum: 1 + description: The file size in bytes + statistics: + type: object + description: Episode statistics. Available only in detailed view. 
+ properties: + subtitleSearch: + type: object + description: subtitle search statistics + properties: + last: + type: string + format: date-time + description: Last subtitle search timestamp + count: + type: integer + format: int32 + minimum: 0 + description: search count + wantedQualities: + type: array + items: + type: string + relatedEpisodes: + type: array + example: [s03e08, s03e09] + items: + type: string + Auth: + description: Auth object + properties: + username: + type: string + password: + type: string + exp: + type: integer + minimum: 3600 + maximum: 31536000 + required: + - username + - password + Error: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + fields: + type: string + Operation: + type: object + properties: + type: + type: string + creation: + type: string + format: date-time + Config: + type: object + properties: + wikiUrl: + type: string + themeName: + type: string + namingForceFolders: + type: boolean + databaseVersion: + type: object + properties: + major: + type: integer + minor: + type: integer + layout: + type: object + properties: + show: + type: object + properties: + specials: + type: boolean + allSeasons: + type: boolean + home: + type: string + history: + type: string + schedule: + type: string + trimZero: + type: boolean + configFile: + type: string + animeSplitHome: + type: boolean + sortArticle: + type: boolean + sourceUrl: + type: string + fanartBackgroundOpacity: + type: number + emby: + type: object + properties: + enabled: + type: boolean + logDir: + type: string + posterSortby: + type: string + subtitles: + type: object + properties: + enabled: + type: boolean + fuzzyDating: + type: boolean + timePreset: + type: string + kodi: + type: object + properties: + enabled: + type: boolean + dbFilename: + type: string + pythonVersion: + type: string + downloadUrl: + type: string + nzb: + type: object + properties: + username: + type: string + host: + type: string + password: + type: string + enabled: + type: boolean + priority: + type: integer + release: + type: string + posterSortDir: + type: string + locale: + type: string + webRoot: + type: string + torrents: + type: object + properties: + username: + type: string + seedTime: + type: integer + rpcurl: + type: string + authType: + type: string + paused: + type: boolean + host: + type: string + path: + type: string + password: + type: string + verifySSL: + type: boolean + highBandwidth: + type: boolean + enabled: + type: boolean + label: + type: string + labelAnime: + type: string + method: + type: string + enum: ["blackhole", "utorrent", "transmission", "deluge", "deluged", "download_station", "rtorrent", "qbittorrent", "mlnet"] + Log: + type: object + properties: + timestamp: + type: string + format: date-time + level: + type: string + enum: [ERROR, WARNING, INFO, DEBUG, DB] + commit: + type: string + thread: + type: string + message: + type: string + threadId: + type: number + extra: + type: string + traceback: + type: array + items: + type: string + args: + type: array + items: + type: object + kwargs: + type: array + items: + type: object + example: + commit: '6a1db77' + level: INFO + message: Waiting for the DAILYSEARCHER thread to exit + thread: EVENT-QUEUE + timestamp: '2017-03-19 08:03:23' + AliasSource: + type: object + properties: + id: + type: string + lastRefresh: + type: integer + description: Last refresh in seconds since Epoch + Alias: + type: object + properties: + id: + type: integer + minimum: 1 + series: + type: string + name: + type: 
string + season: + type: integer + minimum: 0 + type: + type: string + enum: [local] + example: + series: tvdb301824 + name: TheBig + type: local + Metadata: + type: object + properties: + title: + type: string + description: Video title + path: + type: string + description: Video full path + duration: + type: string + description: Video duration + example: '0:19:39.208000' + size: + type: integer + format: int64 + minimum: 0 + description: Video file size in bytes + overall_bit_rate: + type: integer + format: int32 + minimum: 0 + description: Video overall bitrate + video: + type: array + items: + type: object + properties: + number: + type: integer + format: int32 + minimum: 0 + description: Track number + name: + type: string + description: Track name + language: + type: string + description: Track language + duration: + type: string + description: Track duration + size: + type: integer + format: int64 + minimum: 0 + description: Video stream size in bytes + width: + type: integer + format: int32 + minimum: 0 + description: Video width size (pixels) + height: + type: integer + format: int32 + minimum: 0 + description: Video height size (pixels) + scan_type: + type: string + enum: [Progressive, Interlaced] + description: Video scan type + aspect_ratio: + type: number + minimum: 0 + description: Video aspect ratio + pixel_aspect_ratio: + type: number + minimum: 0 + description: Pixel aspect ratio + resolution: + type: string + description: Video resolution + enum: + - "480i" + - "720i" + - "1080i" + - "2160i" + - "4320i" + - "480p" + - "720p" + - "1080p" + - "2160p" + - "4320p" + - "240i" + - "288i" + - "360i" + - "576i" + - "240p" + - "288p" + - "360p" + - "576p" + frame_rate: + type: number + minimum: 0 + description: Video frame rate (frames per second) + bit_depth: + type: integer + format: int32 + minimum: 0 + description: Video bit depth + bit_rate: + type: integer + format: int32 + minimum: 0 + description: Video bit rate + codec: + type: string + description: Video codec + enum: + - h263 + - h264 + - h265 + - Mpeg1 + - Mpeg2 + - MsMpeg4v2 + - MsMpeg4v3 + - Mpeg4 + - XviD + - DivX + - Jpeg + - Wmv1 + - Wmv2 + - Wmv3 + - VC1 + - QuickTime + - VP6 + - VP7 + - VP9 + profile: + type: string + description: Video codec profile + encoder: + type: string + description: Video encoder + media_type: + type: string + description: Video media type + forced: + type: boolean + description: Whether this track is forced + default: + type: boolean + description: Whether this track is default + audio: + type: array + items: + type: object + properties: + number: + type: integer + format: int32 + minimum: 0 + description: Track number + name: + type: string + description: Track name + language: + type: string + description: Track language + duration: + type: string + description: Track duration + size: + type: integer + format: int64 + minimum: 0 + description: Audio stream size in bytes + codec: + type: string + description: Audio codec + enum: + - AC3 + - EAC3 + - TrueHD + - DolbyAtmos + - DTS + - DTS-HD + - AAC + - FLAC + - PCM + - MP2 + - MP3 + - Vorbis + - Opus + - WMAv1 + - WMAv2 + - WMAPro + profile: + type: string + description: Audio codec profile + enum: + - Main + - LC + channels_count: + type: integer + format: int32 + minimum: 0 + description: Number of channels + channel_positions: + type: string + description: Channel positions + channels: + type: string + description: Audio channels + enum: + - "1.0" + - "2.0" + - "5.1" + - "7.1" + bit_depth: + type: integer + format: int32 + minimum: 
0 + description: Audio bit depth + bit_rate: + type: integer + format: int32 + minimum: 0 + description: Audio bit rate + bit_rate_mode: + type: string + enum: [Constant, Variable] + sampling_rate: + type: integer + format: int32 + description: Audio sampling rate + compression: + type: string + enum: [Lossy, Lossless] + description: Audio compression + forced: + type: boolean + description: Whether this track is forced + default: + type: boolean + description: Whether this track is default + subtitle: + type: array + items: + type: object + properties: + number: + type: integer + format: int32 + minimum: 0 + description: Track number + name: + type: string + description: Track name + language: + type: string + description: Track language + hearing_impaired: + type: boolean + description: Whether this track is for hearing impaired people + format: + type: string + description: Subtitles format + enum: + - PGS + - VobSub + - SubRip + - SubStationAlpha + - AdvancedSubStationAlpha + - Tx3g + encoding: + type: string + description: Subtitles encoding + enum: + - 'utf-8' + forced: + type: boolean + description: Whether this track is forced + default: + type: boolean + description: Whether this track is default +parameters: + detailed: + name: detailed + in: query + required: false + description: Whether response should contain detailed information + type: boolean + page: + name: page + in: query + required: false + description: The page to be returned. Default value is 1 + type: integer + format: int32 + limit: + name: limit + in: query + required: false + description: Maximum number of items per page. Default value is 20. Max value is 1000 + type: integer + format: int32 + sort: + name: sort + in: query + required: false + description: The field (or list of fields) to be used while sorting. Use + or - prefix to define sorting order. + type: string + series-id: + name: series-id + in: path + required: true + description: The series id to retrieve. E.g. tvdb1234 + x-example: tvdb301824 + type: string + episode-id: + name: episode-id + in: path + required: true + description: The episode id to retrieve. E.g. 
s02e03, e34 or 2016-12-31 + x-example: s01e01 + type: string + alias-id: + name: alias-id + in: path + required: true + description: The alias id to retrieve + x-example: 123456 + type: integer + format: int32 + alias-source-id: + name: alias-source-id + in: path + required: true + description: The alias-source id to retrieve + type: string + enum: [local, xem, anidb] + alias-source-with-all-id: + name: alias-source-id + in: path + required: true + description: The alias-source id to retrieve + type: string + enum: [all, local, xem, anidb] + asset-id: + name: asset-id + in: path + required: true + description: The asset to retrieve + type: string + enum: + - banner + - bannerThumb + - fanart + - poster + - posterThumb + - network + - small + config-id: + name: config-id + in: path + required: true + description: The configuration to retrieve + type: string + enum: + - main + log-level: + name: level + in: query + required: false + description: The log level + type: string + enum: [ERROR, WARNING, INFO, DEBUG, DB] +responses: + pagination: + description: Pagination response + headers: + X-Pagination-Page: + type: integer + format: int32 + description: The page number + X-Pagination-Limit: + type: integer + format: int32 + description: The pagination limit + X-Pagination-Count: + type: integer + format: int32 + description: The total items count + Link: + type: string + description: "The pagination links: next, last, first and previous" + pagination_stream: + description: Pagination response + headers: + X-Pagination-Page: + type: integer + format: int32 + description: The page number + X-Pagination-Limit: + type: integer + format: int32 + description: The pagination limit + Link: + type: string + description: "The pagination links: next, last, first and previous" + error: + description: Unexpected error + schema: + $ref: '#/definitions/Error' diff --git a/dredd.yml b/dredd/dredd.yml similarity index 76% rename from dredd.yml rename to dredd/dredd.yml index 9aec524c84..6165991f57 100644 --- a/dredd.yml +++ b/dredd/dredd.yml @@ -1,15 +1,12 @@ dry-run: null -hookfiles: ./dredd_hook.py +hookfiles: dredd/dredd_hook.py language: python sandbox: false -server: python ./start.py -server-wait: 20 +server: python dredd/dredd_hook.py +server-wait: 10 init: false -custom: - apiaryApiKey: '' names: false only: [] -reporter: apiary output: [] header: [] sorted: false @@ -30,6 +27,6 @@ hooks-worker-term-timeout: 5000 hooks-worker-term-retry: 500 hooks-worker-handler-host: localhost hooks-worker-handler-port: 61321 -config: ./dredd.yml -blueprint: api-description.yml +config: dredd.yml +blueprint: dredd/api-description.yml endpoint: 'http://localhost:8081' diff --git a/dredd/dredd_hook.py b/dredd/dredd_hook.py new file mode 100644 index 0000000000..369982a7d7 --- /dev/null +++ b/dredd/dredd_hook.py @@ -0,0 +1,173 @@ +"""Dredd hook.""" +import ConfigParser +import json +import urlparse +from collections import Mapping +from urllib import urlencode + +import dredd_hooks as hooks + +from six import string_types +import yaml + + +api_description = None + +stash = { + 'web-username': 'testuser', + 'web-password': 'testpass', + 'api-key': '1234567890ABCDEF1234567890ABCDEF', +} + + +@hooks.before_all +def load_api_description(transactions): + """Load api description.""" + global api_description + with open(transactions[0]['origin']['filename'], 'r') as stream: + api_description = yaml.safe_load(stream) + + +@hooks.before_each +def configure_transaction(transaction): + """Configure request based on x- property 
values for each response code.""" + base_path = api_description['basePath'] + + path = transaction['origin']['resourceName'] + method = transaction['request']['method'] + status_code = int(transaction['expected']['statusCode']) + response = api_description['paths'][path[len(base_path):]][method.lower()]['responses'][status_code] + + # Whether we should skip this test + transaction['skip'] = response.get('x-disabled', False) + + # Add api-key + if not response.get('x-no-api-key', False): + transaction['request']['headers']['x-api-key'] = stash['api-key'] + + # If no body is expected, skip body validation + expected = transaction['expected'] + expected_content_type = expected['headers'].get('Content-Type') + expected_status_code = int(expected['statusCode']) + if expected_status_code == 204 or response.get('x-expect', {}).get('no-body', False): + del expected['body'] + if expected_content_type: + print('Skipping content-type validation for {name!r}.'.format(name=transaction['name'])) + del expected['headers']['Content-Type'] + + # Keep stash configuration in the transaction to be executed in an after hook + transaction['x-stash'] = response.get('x-stash') or {} + + # Change request based on x-request configuration + url = transaction['fullPath'] + parsed_url = urlparse.urlparse(url) + parsed_params = urlparse.parse_qs(parsed_url.query) + parsed_path = parsed_url.path + + request = response.get('x-request', {}) + body = request.get('body') + if body is not None: + transaction['request']['body'] = json.dumps(evaluate(body)) + + path_params = request.get('path-params') + if path_params: + params = {} + resource_parts = path.split('/') + for i, part in enumerate(url.split('/')): + if not part: + continue + + resource_part = resource_parts[i] + if resource_part[0] == '{' and resource_part[-1] == '}': + params[resource_part[1:-1]] = part + + params.update(path_params) + new_url = path + for name, value in params.items(): + value = evaluate(value) + new_url = new_url.replace('{' + name + '}', str(value)) + + replace_url(transaction, new_url) + + query_params = request.get('query-params') + if query_params: + for name, value in query_params.items(): + query_params[name] = evaluate(value) + + query_params = dict(parsed_params, **query_params) + new_url = parsed_path if not query_params else parsed_path + '?' 
+ urlencode(query_params) + + replace_url(transaction, new_url) + + +@hooks.after_each +def stash_values(transaction): + """Stash values.""" + if 'real' in transaction and 'bodySchema' in transaction['expected']: + body = json.loads(transaction['real']['body']) if transaction['real']['body'] else None + headers = transaction['real']['headers'] + for name, value in transaction['x-stash'].items(): + value = evaluate(value, {'body': body, 'headers': headers}) + print('Stashing {name}: {value!r}'.format(name=name, value=value)) + stash[name] = value + + +def replace_url(transaction, new_url): + """Replace with a new URL.""" + transaction['fullPath'] = new_url + transaction['request']['uri'] = new_url + transaction['id'] = transaction['request']['method'] + ' ' + new_url + + +def evaluate(expression, context=None): + """Evaluate the expression value.""" + context = context or {'stash': stash} + if isinstance(expression, string_types) and expression.startswith('${') and expression.endswith('}'): + value = eval(expression[2:-1], context) + print('Expression {expression} evaluated to {value!r}'.format(expression=expression, value=value)) + return value + elif isinstance(expression, Mapping): + for key, value in expression.items(): + expression[key] = evaluate(value, context=context) + elif isinstance(expression, list): + for i, value in enumerate(expression): + expression[i] = evaluate(value, context=context) + + return expression + + +def start(): + """Start application.""" + import os + import shutil + import sys + + current_dir = os.path.dirname(__file__) + app_dir = os.path.abspath(os.path.join(current_dir, '..')) + data_dir = os.path.abspath(os.path.join(current_dir, 'data')) + if os.path.isdir(data_dir): + shutil.rmtree(data_dir) + args = [ + '--datadir={0}'.format(data_dir), + '--nolaunch', + ] + + os.makedirs(data_dir) + os.chdir(data_dir) + config = ConfigParser.RawConfigParser() + config.read('config.ini') + config.add_section('General') + config.set('General', 'web_username', stash['web-username']) + config.set('General', 'web_password', stash['web-password']) + config.set('General', 'api_key', stash['api-key']) + with open('config.ini', 'wb') as configfile: + config.write(configfile) + + sys.path.insert(1, app_dir) + + from medusa.__main__ import Application + application = Application() + application.start(args) + +if __name__ == '__main__': + start() diff --git a/dredd_hook.py b/dredd_hook.py deleted file mode 100644 index 98706a116e..0000000000 --- a/dredd_hook.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Dredd hook.""" -import ConfigParser - -import dredd_hooks as hooks - -web_username = 'test_username' -web_password = 'test_password' -api_key = 'test_api_key' - - -@hooks.before_all -def set_auth(transaction): - """Set auth.""" - config = ConfigParser.RawConfigParser() - config.read(r'config.ini') - config.set('General', 'web_username', web_username) - config.set('General', 'web_password', web_password) - config.set('General', 'api_key', api_key) - with open('config.ini', 'wb') as configfile: - config.write(configfile) - - -@hooks.before_each -def add_api_key(transaction): - """Add api key.""" - transaction['request']['headers']['x-api-key'] = api_key - - -@hooks.before('/authenticate > POST') -def add_auth(transaction): - """Add auth.""" - del transaction['request']['headers']['x-api-key'] - transaction['request']['body']['username'] = web_username - transaction['request']['body']['password'] = web_password diff --git a/medusa/db.py b/medusa/db.py index e1d42d5b0f..0e24ce8a72 100644 --- 
a/medusa/db.py +++ b/medusa/db.py @@ -100,16 +100,16 @@ def _execute(self, query, args=None, fetchall=False, fetchone=False): :return: query results """ try: + cursor = self.connection.cursor() if not args: - sql_results = self.connection.cursor().execute(query) + sql_results = cursor.execute(query) else: - sql_results = self.connection.cursor().execute(query, args) + sql_results = cursor.execute(query, args) if fetchall: return sql_results.fetchall() elif fetchone: return sql_results.fetchone() - else: - return sql_results + return sql_results except sqlite3.OperationalError as e: # This errors user should be able to fix it. if 'unable to open database file' in e.args[0] or \ diff --git a/medusa/helper/collections.py b/medusa/helper/collections.py new file mode 100644 index 0000000000..5da32e69ac --- /dev/null +++ b/medusa/helper/collections.py @@ -0,0 +1,11 @@ +# coding=utf-8 +"""Extended collections.""" + + +class NonEmptyDict(dict): + """Dictionary that only accept values that are not none and not empty strings.""" + + def __setitem__(self, key, value): + """Discard None values and empty strings.""" + if key in self or value is not None and value != '': + super(NonEmptyDict, self).__setitem__(key, value) diff --git a/medusa/image_cache.py b/medusa/image_cache.py index 350ed76183..3a76d35737 100644 --- a/medusa/image_cache.py +++ b/medusa/image_cache.py @@ -39,7 +39,8 @@ def __init__(self): def __del__(self): pass - def _cache_dir(self): + @classmethod + def _cache_dir(cls): """Build up the full path to the image cache directory.""" return os.path.abspath(os.path.join(app.CACHE_DIR, 'images')) @@ -47,7 +48,8 @@ def _thumbnails_dir(self): """Build up the full path to the thumbnails image cache directory.""" return os.path.abspath(os.path.join(self._cache_dir(), 'thumbnails')) - def poster_path(self, indexer_id): + @classmethod + def poster_path(cls, indexer_id): """ Build up the path to a poster cache for a given Indexer ID. @@ -55,9 +57,10 @@ def poster_path(self, indexer_id): :return: a full path to the cached poster file for the given Indexer ID """ poster_file_name = '{0}.poster.jpg'.format(indexer_id) - return os.path.join(self._cache_dir(), poster_file_name) + return os.path.join(cls._cache_dir(), poster_file_name) - def banner_path(self, indexer_id): + @classmethod + def banner_path(cls, indexer_id): """ Build up the path to a banner cache for a given Indexer ID. @@ -65,7 +68,7 @@ def banner_path(self, indexer_id): :return: a full path to the cached banner file for the given Indexer ID """ banner_file_name = '{0}.banner.jpg'.format(indexer_id) - return os.path.join(self._cache_dir(), banner_file_name) + return os.path.join(cls._cache_dir(), banner_file_name) def fanart_path(self, indexer_id): """ diff --git a/medusa/indexers/indexer_config.py b/medusa/indexers/indexer_config.py index 339f606ce3..f6cb1eddc9 100644 --- a/medusa/indexers/indexer_config.py +++ b/medusa/indexers/indexer_config.py @@ -122,7 +122,16 @@ def indexer_name_to_id(indexer_name): :param indexer_name: Identifier of the indexer. Example: will return 1 for 'tvdb'. :return: The indexer id. """ - return {v['identifier']: k for k, v in indexerConfig.items()}[indexer_name] + return {v['identifier']: k for k, v in indexerConfig.items()}.get(indexer_name) + + +def indexer_id_to_name(indexer): + """Reverse translate the indexer identifier to it's id. + + :param indexer: Indexer id. E.g.: 1. + :return: The indexer name. 
E.g.: tvdb + """ + return indexerConfig[indexer]['identifier'] def indexer_id_to_slug(indexer, indexer_id): @@ -144,7 +153,8 @@ def slug_to_indexer_id(slug): if not slug: return None, None result = re.compile(r'([a-z]+)([0-9]+)').match(slug) - return indexer_name_to_id(result.group(1)), int(result.group(2)) + if result: + return indexer_name_to_id(result.group(1)), int(result.group(2)) def get_trakt_indexer(indexer): diff --git a/medusa/logger/__init__.py b/medusa/logger/__init__.py index 1bd516db23..f9fc309314 100644 --- a/medusa/logger/__init__.py +++ b/medusa/logger/__init__.py @@ -106,7 +106,7 @@ def get_loggers(package): return [standard_logger(modname) for modname in list_modules(package)] -def read_loglines(log_file=None, modification_time=None, max_lines=None, max_traceback_depth=100, +def read_loglines(log_file=None, modification_time=None, start_index=0, max_lines=None, max_traceback_depth=100, predicate=lambda logline: True, formatter=lambda logline: logline): """A generator that returns the lines of all consolidated log files in descending order. @@ -114,6 +114,7 @@ def read_loglines(log_file=None, modification_time=None, max_lines=None, max_tra :type log_file: str or unicode :param modification_time: :type modification_time: datetime.datetime + :param start_index: :param max_lines: :type max_lines: int :param max_traceback_depth: @@ -132,6 +133,7 @@ def read_loglines(log_file=None, modification_time=None, max_lines=None, max_tra for f in log_files: if not f or not os.path.isfile(f): continue + if modification_time: log_mtime = os.path.getmtime(f) if log_mtime and datetime.datetime.fromtimestamp(log_mtime) < modification_time: @@ -150,7 +152,8 @@ def read_loglines(log_file=None, modification_time=None, max_lines=None, max_tra del traceback_lines[:] if predicate(logline): counter += 1 - yield formatter(logline) + if counter >= start_index: + yield formatter(logline) if max_lines is not None and counter >= max_lines: return @@ -160,7 +163,8 @@ def read_loglines(log_file=None, modification_time=None, max_lines=None, max_tra del traceback_lines[:] if predicate(logline): counter += 1 - yield formatter(logline) + if counter >= start_index: + yield formatter(logline) if max_lines is not None and counter >= max_lines: return else: @@ -170,7 +174,9 @@ def read_loglines(log_file=None, modification_time=None, max_lines=None, max_tra message = traceback_lines[-1] logline = LogLine(message, message=message, traceback_lines=list(reversed(traceback_lines[:-1]))) if predicate(logline): - yield formatter(logline) + counter += 1 + if counter >= start_index: + yield formatter(logline) def reverse_readlines(filename, buf_size=2097152, encoding=default_encoding): diff --git a/medusa/server/api/v2/alias.py b/medusa/server/api/v2/alias.py new file mode 100644 index 0000000000..eac426368f --- /dev/null +++ b/medusa/server/api/v2/alias.py @@ -0,0 +1,173 @@ +# coding=utf-8 +"""Request handler for alias (scene exceptions).""" + +from medusa import db +from medusa.helper.collections import NonEmptyDict +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.tv.series import SeriesIdentifier +from tornado.escape import json_decode + + +class AliasHandler(BaseRequestHandler): + """Alias request handler.""" + + #: resource name + name = 'alias' + #: identifier + identifier = ('identifier', r'\d+') + #: path param + path_param = ('path_param', r'\w+') + #: allowed HTTP methods + allowed_methods = ('GET', 'POST', 'PUT', 'DELETE') + + def get(self, identifier, path_param): + """Query 
scene_exception information.""" + cache_db_con = db.DBConnection('cache.db') + sql_base = (b'SELECT ' + b' exception_id, ' + b' indexer, ' + b' indexer_id, ' + b' show_name, ' + b' season, ' + b' custom ' + b'FROM scene_exceptions ') + sql_where = [] + params = [] + + if identifier is not None: + sql_where.append(b'exception_id') + params += [identifier] + else: + series_slug = self.get_query_argument('series', None) + series_identifier = SeriesIdentifier.from_slug(series_slug) + + if series_slug and not series_identifier: + return self._bad_request('Invalid series') + + season = self._parse(self.get_query_argument('season', None)) + exception_type = self.get_query_argument('type', None) + if exception_type and exception_type not in ('local', ): + return self._bad_request('Invalid type') + + if series_identifier: + sql_where.append(b'indexer') + sql_where.append(b'indexer_id') + params += [series_identifier.indexer.id, series_identifier.id] + + if season is not None: + sql_where.append(b'season') + params += [season] + + if exception_type == 'local': + sql_where.append(b'custom') + params += [1] + + if sql_where: + sql_base += b' WHERE ' + b' AND '.join([where + b' = ? ' for where in sql_where]) + + sql_results = cache_db_con.select(sql_base, params) + + data = [] + for item in sql_results: + d = NonEmptyDict() + d['id'] = item[0] + d['series'] = SeriesIdentifier.from_id(item[1], item[2]).slug + d['name'] = item[3] + d['season'] = item[4] if item[4] >= 0 else None + d['type'] = 'local' if item[5] else None + data.append(d) + + if not identifier: + return self._paginate(data, sort='id') + + if not data: + return self._not_found('Alias not found') + + data = data[0] + if path_param: + if path_param not in data: + return self._bad_request('Invalid path parameter') + data = data[path_param] + + return self._ok(data=data) + + def put(self, identifier, **kwargs): + """Update alias information.""" + identifier = self._parse(identifier) + if not identifier: + return self._not_found('Invalid alias id') + + data = json_decode(self.request.body) + + if not data or not all([data.get('id'), data.get('series'), data.get('name'), + data.get('type')]) or data['id'] != identifier: + return self._bad_request('Invalid request body') + + series_identifier = SeriesIdentifier.from_slug(data.get('series')) + if not series_identifier: + return self._bad_request('Invalid series') + + cache_db_con = db.DBConnection('cache.db') + last_changes = cache_db_con.connection.total_changes + cache_db_con.action(b'UPDATE scene_exceptions' + b' set indexer = ?' + b', indexer_id = ?' + b', show_name = ?' + b', season = ?' 
+ b', custom = 1' + b' WHERE exception_id = ?', + [series_identifier.indexer.id, + series_identifier.id, + data['name'], + data.get('season'), + identifier]) + + if cache_db_con.connection.total_changes - last_changes != 1: + return self._not_found('Alias not found') + + return self._no_content() + + def post(self, identifier, **kwargs): + """Add an alias.""" + if identifier is not None: + return self._bad_request('Alias id should not be specified') + + data = json_decode(self.request.body) + + if not data or not all([data.get('series'), data.get('name'), + data.get('type')]) or 'id' in data or data['type'] != 'local': + return self._bad_request('Invalid request body') + + series_identifier = SeriesIdentifier.from_slug(data.get('series')) + if not series_identifier: + return self._bad_request('Invalid series') + + cache_db_con = db.DBConnection('cache.db') + last_changes = cache_db_con.connection.total_changes + cursor = cache_db_con.action(b'INSERT INTO scene_exceptions' + b' (indexer, indexer_id, show_name, season, custom) ' + b' values (?,?,?,?,1)', + [series_identifier.indexer.id, + series_identifier.id, + data['name'], + data.get('season')]) + + if cache_db_con.connection.total_changes - last_changes <= 0: + return self._conflict('Unable to create alias') + + data['id'] = cursor.lastrowid + return self._created(data=data, identifier=data['id']) + + def delete(self, identifier, **kwargs): + """Delete an alias.""" + identifier = self._parse(identifier) + if not identifier: + return self._bad_request('Invalid alias id') + + cache_db_con = db.DBConnection('cache.db') + last_changes = cache_db_con.connection.total_changes + cache_db_con.action(b'DELETE FROM scene_exceptions WHERE exception_id = ?', [identifier]) + if cache_db_con.connection.total_changes - last_changes <= 0: + return self._not_found('Alias not found') + + return self._no_content() diff --git a/medusa/server/api/v2/alias_source.py b/medusa/server/api/v2/alias_source.py new file mode 100644 index 0000000000..12f89213af --- /dev/null +++ b/medusa/server/api/v2/alias_source.py @@ -0,0 +1,92 @@ +# coding=utf-8 +"""Request handler for alias source.""" +from datetime import datetime + +from medusa.scene_exceptions import get_last_refresh, retrieve_exceptions +from medusa.server.api.v2.base import BaseRequestHandler +from tornado.escape import json_decode + + +def find_alias_sources(predicate=None): + """Query the cache table for the last update for every scene exception source.""" + data = [] + mapping = {'local': 'custom_exceptions'} + for identifier in ('local', 'xem', 'anidb'): + if not predicate or predicate(identifier): + last_refresh = get_last_refresh(mapping.get(identifier, identifier))[0]['last_refreshed'] + data.append({'id': identifier, 'lastRefresh': last_refresh}) + + return data + + +class AliasSourceHandler(BaseRequestHandler): + """Alias source request handler.""" + + #: resource name + name = 'alias-source' + #: identifier + identifier = ('identifier', r'\w+') + #: path param + path_param = ('path_param', r'\w+') + #: allowed HTTP methods + allowed_methods = ('GET', ) + + def get(self, identifier, path_param=None): + """Query alias source information. 
+ + :param identifier: source name + :param path_param: + """ + if not identifier: + data = find_alias_sources() + return self._paginate(data, sort='id') + + data = find_alias_sources(predicate=lambda v: v == identifier) + if not data: + return self._not_found('Alias source not found.') + + data = data[0] + if path_param: + if path_param not in data: + return self._bad_request('Invalid path parameter') + data = data[path_param] + + return self._ok(data=data) + + +class AliasSourceOperationHandler(BaseRequestHandler): + """Alias source operation request handler.""" + + #: parent resource handler + parent_handler = AliasSourceHandler + #: resource name + name = 'operation' + #: identifier + identifier = None + #: path param + path_param = None + #: allowed HTTP methods + allowed_methods = ('POST', ) + + def post(self, identifier): + """Refresh all scene exception types.""" + types = { + 'local': 'custom_exceptions', + 'xem': 'xem', + 'anidb': 'anidb', + 'all': None, + } + + if identifier not in types: + return self._not_found('Alias source not found') + + data = json_decode(self.request.body) + if not data or not all([data.get('type')]) and len(data) != 1: + return self._bad_request('Invalid request body') + + if data['type'] == 'REFRESH': + retrieve_exceptions(force=True, exception_type=types[identifier]) + data['creation'] = datetime.utcnow().isoformat()[:-3] + 'Z' + return self._created(data=data) + + return self._bad_request('Operation not supported') diff --git a/medusa/server/api/v2/asset.py b/medusa/server/api/v2/asset.py deleted file mode 100644 index e1b82ea1be..0000000000 --- a/medusa/server/api/v2/asset.py +++ /dev/null @@ -1,36 +0,0 @@ -# coding=utf-8 -"""Request handler for assets.""" - -from .base import BaseRequestHandler -from ....media.banner import ShowBanner -from ....media.fan_art import ShowFanArt -from ....media.network_logo import ShowNetworkLogo -from ....media.poster import ShowPoster - - -class AssetHandler(BaseRequestHandler): - """Asset request handler.""" - - def get(self, asset_group=None, query=None, *args, **kwargs): - """Get an asset.""" - if asset_group == 'show': - # http://localhost:8081/api/v2/asset/show/295519?api_key=xxx&type=banner - asset_type = self.get_argument('type', default='banner') - show_id = query - media = None - media_format = ('normal', 'thumb')[asset_type in ('bannerThumb', 'posterThumb', 'small')] - - if asset_type.lower().startswith('banner'): - media = ShowBanner(show_id, media_format) - elif asset_type.lower().startswith('fanart'): - media = ShowFanArt(show_id, media_format) - elif asset_type.lower().startswith('poster'): - media = ShowPoster(show_id, media_format) - elif asset_type.lower().startswith('network'): - media = ShowNetworkLogo(show_id, media_format) - - if media is not None: - self.set_header('Content-Type', media.get_media_type()) - self.api_finish(stream=media.get_media()) - else: - self.api_finish(status=404, error='Asset or Asset Type Does Not Exist') diff --git a/medusa/server/api/v2/auth.py b/medusa/server/api/v2/auth.py index a627b30d41..f7ec6bef49 100644 --- a/medusa/server/api/v2/auth.py +++ b/medusa/server/api/v2/auth.py @@ -1,25 +1,27 @@ # coding=utf-8 """Request handler for authentication.""" +import logging import random import string import time + import jwt -import tornado +from medusa import app, helpers, notifiers +from medusa.logger.adapters.style import BraceAdapter +from medusa.server.api.v2.base import BaseRequestHandler +from tornado.escape import json_decode -from .base import BaseRequestHandler 
-from .... import app, helpers, logger, notifiers +log = BraceAdapter(logging.getLogger(__name__)) class AuthHandler(BaseRequestHandler): """Auth request handler.""" - def set_default_headers(self): - """Set default CORS headers.""" - super(AuthHandler, self).set_default_headers() - if app.APP_VERSION: - self.set_header('X-Medusa-Server', app.APP_VERSION) - self.set_header('Access-Control-Allow-Methods', 'POST, OPTIONS') + #: resource name + name = 'authenticate' + #: allowed HTTP methods + allowed_methods = ('POST', ) def prepare(self): """Prepare.""" @@ -29,54 +31,47 @@ def post(self, *args, **kwargs): """Request JWT.""" username = app.WEB_USERNAME password = app.WEB_PASSWORD - submitted_username = '' - submitted_password = '' - submitted_exp = 86400 # 1 day - request_body = {} # If the user hasn't set a username and/or password just let them login - if username.strip() != '' and password.strip() != '': - if self.request.body: - if self.request.headers['content-type'] == 'application/json': - request_body = tornado.escape.json_decode(self.request.body) - else: - self._failed_login(error='Incorrect content-type') - if all(x in request_body for x in ['username', 'password']): - submitted_username = request_body['username'] - submitted_password = request_body['password'] - if 'exp' in request_body: - submitted_exp = request_body['exp'] - else: - self._failed_login(error='No Credentials Provided') - - if username != submitted_username or password != submitted_password: - self._failed_login(error='Invalid credentials') - else: - self._login(submitted_exp) - else: - self._login() + if not username.strip() or not password.strip(): + return self._login() + + if not self.request.body: + return self._failed_login(error='No Credentials Provided') + + if self.request.headers['content-type'] != 'application/json': + return self._failed_login(error='Incorrect content-type') + + request_body = json_decode(self.request.body) + submitted_username = request_body.get('username') + submitted_password = request_body.get('password') + submitted_exp = request_body.get('exp', 86400) + if username != submitted_username or password != submitted_password: + return self._failed_login(error='Invalid credentials') + + self._login(submitted_exp) def _login(self, exp=86400): self.set_header('Content-Type', 'application/jwt') if app.NOTIFY_ON_LOGIN and not helpers.is_ip_private(self.request.remote_ip): notifiers.notify_login(self.request.remote_ip) - logger.log('{user} logged into the API v2'.format(user=app.WEB_USERNAME), logger.INFO) + log.info('{user} logged into the API v2', {'user': app.WEB_USERNAME}) time_now = int(time.time()) - self.api_finish(data=jwt.encode({ + self._ok(data=jwt.encode({ 'iss': 'Medusa ' + app.APP_VERSION, 'iat': time_now, # @TODO: The jti should be saved so we can revoke tokens 'jti': ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(20)), 'exp': time_now + int(exp), - 'scopes': ['show:read', 'show:write'], # @TODO: This should be reaplce with scopes or roles/groups + 'scopes': ['show:read', 'show:write'], # @TODO: This should be replaced with scopes or roles/groups 'username': app.WEB_USERNAME, 'apiKey': app.API_KEY # TODO: This should be replaced with the JWT itself }, app.ENCRYPTION_SECRET, algorithm='HS256')) def _failed_login(self, error=None): - self.api_finish(status=401, error=error) - logger.log('{user} attempted a failed login to the API v2 from IP: {ip}'.format( - user=app.WEB_USERNAME, - ip=self.request.remote_ip - ), logger.WARNING) + 
self._unauthorized(error=error) + log.warning('{user} attempted a failed login to the API v2 from IP: {ip}', { + 'user': app.WEB_USERNAME, + 'ip': self.request.remote_ip + }) diff --git a/medusa/server/api/v2/base.py b/medusa/server/api/v2/base.py index 72eb7f7a17..cf0d155805 100644 --- a/medusa/server/api/v2/base.py +++ b/medusa/server/api/v2/base.py @@ -2,125 +2,283 @@ """Base module for request handlers.""" import base64 +import collections import json import operator import traceback -from datetime import datetime +from datetime import date, datetime from babelfish.language import Language import jwt -from six import text_type +from medusa import app +from six import string_types, text_type +from tornado.httpclient import HTTPError from tornado.web import RequestHandler -from .... import app - class BaseRequestHandler(RequestHandler): """A base class used for shared RequestHandler methods.""" + DEFAULT_ALLOWED_METHODS = ('OPTIONS', ) + + #: resource name + name = None + #: identifier + identifier = None + #: path param + path_param = None + #: allowed HTTP methods + allowed_methods = None + #: parent resource handler + parent_handler = None + def prepare(self): """Check if JWT or API key is provided and valid.""" - if self.request.method != 'OPTIONS': - token = '' - api_key = '' - if self.request.headers.get('Authorization'): - if self.request.headers.get('Authorization').startswith('Bearer'): - try: - token = jwt.decode(self.request.headers.get('Authorization').replace('Bearer ', ''), app.ENCRYPTION_SECRET, algorithms=['HS256']) - except jwt.ExpiredSignatureError: - self.api_finish(status=401, error='Token has expired.') - except jwt.DecodeError: - self.api_finish(status=401, error='Invalid token.') - if self.request.headers.get('Authorization').startswith('Basic'): - auth_decoded = base64.decodestring(self.request.headers.get('Authorization')[6:]) - username, password = auth_decoded.split(':', 2) - if username != app.WEB_USERNAME or password != app.WEB_PASSWORD: - self.api_finish(status=401, error='Invalid user/pass.') - - if self.get_argument('api_key', default='') and self.get_argument('api_key', default='') == app.API_KEY: - api_key = self.get_argument('api_key', default='') - if self.request.headers.get('X-Api-Key') and self.request.headers.get('X-Api-Key') == app.API_KEY: - api_key = self.request.headers.get('X-Api-Key') - if token == '' and api_key == '': - self.api_finish(status=401, error='Invalid token or API key.') + if self.request.method == 'OPTIONS': + return + + api_key = self.get_argument('api_key', default=None) or self.request.headers.get('X-Api-Key') + if api_key and api_key == app.API_KEY: + return + + authorization = self.request.headers.get('Authorization') + if not authorization: + return self._unauthorized('No authorization token.') + + if authorization.startswith('Bearer'): + try: + token = authorization.replace('Bearer ', '') + jwt.decode(token, app.ENCRYPTION_SECRET, algorithms=['HS256']) + except jwt.ExpiredSignatureError: + return self._unauthorized('Token has expired.') + except jwt.DecodeError: + return self._unauthorized('Invalid token.') + elif authorization.startswith('Basic'): + auth_decoded = base64.decodestring(authorization[6:]) + username, password = auth_decoded.split(':', 2) + if username != app.WEB_USERNAME or password != app.WEB_PASSWORD: + return self._unauthorized('Invalid user/pass.') + else: + return self._unauthorized('Invalid token.') def write_error(self, *args, **kwargs): """Only send traceback if app.DEVELOPER is true.""" if 
app.DEVELOPER and 'exc_info' in kwargs: self.set_header('content-type', 'text/plain') + self.set_status(500) for line in traceback.format_exception(*kwargs["exc_info"]): self.write(line) self.finish() else: - self.api_finish(status=500, error='Internal Server Error') + self._internal_server_error() def options(self, *args, **kwargs): """Options.""" - self.set_status(204) - self.finish() + self._no_content() def set_default_headers(self): """Set default CORS headers.""" + if app.APP_VERSION: + self.set_header('X-Medusa-Server', app.APP_VERSION) self.set_header('Access-Control-Allow-Origin', '*') self.set_header('Access-Control-Allow-Headers', 'Origin, Accept, Authorization, Content-Type,' 'X-Requested-With, X-CSRF-Token, X-Api-Key, X-Medusa-Server') - self.set_header('Access-Control-Allow-Methods', 'GET, OPTIONS') + self.set_header('Access-Control-Allow-Methods', ', '.join(self.DEFAULT_ALLOWED_METHODS + self.allowed_methods)) - def api_finish(self, status=None, error=None, data=None, headers=None, stream=None, **kwargs): + def api_finish(self, status=None, error=None, data=None, headers=None, stream=None, content_type=None, **kwargs): """End the api request writing error or data to http response.""" + content_type = content_type or 'application/json; charset=UTF-8' if headers is not None: for header in headers: self.set_header(header, headers[header]) if error is not None and status is not None: - self.set_header('content-type', 'application/json') self.set_status(status) + self.set_header('content-type', content_type) self.finish({ 'error': error }) else: self.set_status(status or 200) if data is not None: - self.set_header('content-type', 'application/json') - self.finish(json.JSONEncoder(default=json_string_encoder).encode(data)) + self.set_header('content-type', content_type) + self.finish(json.JSONEncoder(default=json_default_encoder).encode(data)) elif stream: # This is mainly for assets + self.set_header('content-type', content_type) self.finish(stream) - elif kwargs: + elif kwargs and 'chunk' in kwargs: + self.set_header('content-type', content_type) self.finish(kwargs) + @classmethod + def _create_base_url(cls, prefix_url, resource_name, *args): + elements = [prefix_url, resource_name] + \ + [r'(?P<{key}>{value})'.format(key=key, value=value) for (key, value) in args] + return '/'.join(elements) + + @classmethod + def create_url(cls, prefix_url, resource_name, *args): + """Create url base on resource name and path params.""" + resource_url = prefix_url + '/' + resource_name + path_params = '' + + for arg in args: + if not arg: + continue + + key, value = arg + q = r'(?:/(?P<{key}>{value}))'.format(key=key, value=value) + if path_params: + path_params = r'(?:{previous}(?:{current}|/?))'.format(previous=path_params, current=q) + else: + path_params = q + + path_params = r'(?:{path}|/?)'.format(path=path_params) + + return resource_url + path_params + '/?$' + + @classmethod + def create_app_handler(cls, base): + """Create app handler tuple: regex, class.""" + if cls.parent_handler: + base = cls._create_base_url(base, cls.parent_handler.name, cls.parent_handler.identifier) + + return cls.create_url(base, cls.name, *(cls.identifier, cls.path_param)), cls + + def _handle_request_exception(self, e): + if isinstance(e, HTTPError): + self.api_finish(e.code, e.message) + else: + super(BaseRequestHandler, self)._handle_request_exception(e) + + def _ok(self, data=None, headers=None, stream=None, content_type=None): + self.api_finish(200, data=data, headers=headers, stream=stream, 
content_type=content_type) + + def _created(self, data=None, identifier=None): + if identifier is not None: + location = self.request.path + if not location.endswith('/'): + location += '/' + + self.set_header('Location', '{0}{1}'.format(location, identifier)) + self.api_finish(201, data=data) + + def _accepted(self): + self.api_finish(202) + + def _no_content(self): + self.api_finish(204) + + def _bad_request(self, error): + self.api_finish(400, error=error) + + def _unauthorized(self, error): + self.api_finish(401, error=error) + + def _not_found(self, error='Resource not found'): + self.api_finish(404, error=error) + + def _method_not_allowed(self, error): + self.api_finish(405, error=error) + + def _conflict(self, error): + self.api_finish(409, error=error) + + def _internal_server_error(self, error='Internal Server Error'): + self.api_finish(500, error=error) + + def _not_implemented(self): + self.api_finish(501) + + @classmethod + def _raise_bad_request_error(cls, error): + raise HTTPError(400, error) + def _get_sort(self, default): - return self.get_argument('sort', default=default) + values = self.get_argument('sort', default=default) + if values: + results = [] + for value in values.split(','): + reverse = value.startswith('-') + if reverse or value.startswith('+'): + value = value[1:] - def _get_sort_order(self, default='asc'): - return self.get_argument('sort_order', default=default).lower() + results.append((value, reverse)) + + return results def _get_page(self): - return max(1, int(self.get_argument('page', default=1))) + try: + page = int(self.get_argument('page', default=1)) + if page < 1: + self._raise_bad_request_error('Invalid page parameter') + + return page + except ValueError: + self._raise_bad_request_error('Invalid page parameter') def _get_limit(self, default=20, maximum=1000): - return min(max(1, int(self.get_argument('limit', default=default))), maximum) + try: + limit = self._parse(self.get_argument('limit', default=default)) + if limit < 1 or limit > maximum: + self._raise_bad_request_error('Invalid limit parameter') - def _paginate(self, data, sort_property): - arg_sort = self._get_sort(default=sort_property) - arg_sort_order = self._get_sort_order() + return limit + except ValueError: + self._raise_bad_request_error('Invalid limit parameter') + + def _paginate(self, data=None, data_generator=None, sort=None): arg_page = self._get_page() arg_limit = self._get_limit() - results = sorted(data, key=operator.itemgetter(arg_sort), reverse=arg_sort_order == 'desc') - count = len(results) - start = (arg_page - 1) * arg_limit - end = start + arg_limit - results = results[start:end] headers = { - 'X-Pagination-Count': count, 'X-Pagination-Page': arg_page, 'X-Pagination-Limit': arg_limit } - return self.api_finish(data=results, headers=headers) + first_page = arg_page if arg_page > 0 else 1 + previous_page = None if arg_page <= 1 else arg_page - 1 + if data_generator: + results = list(data_generator())[:arg_limit] + next_page = None if len(results) < arg_limit else arg_page + 1 + last_page = None + else: + arg_sort = self._get_sort(default=sort) + start = (arg_page - 1) * arg_limit + end = start + arg_limit + results = data + if arg_sort: + try: + for field, reverse in reversed(arg_sort): + results = sorted(results, key=operator.itemgetter(field), reverse=reverse) + except KeyError: + return self._bad_request('Invalid sort query parameter') + + count = len(results) + headers['X-Pagination-Count'] = count + results = results[start:end] + next_page = None if end > count 
else arg_page + 1 + last_page = ((count - 1) / arg_limit) + 1 + if last_page <= arg_page: + last_page = None + + links = [] + for rel, page in (('next', next_page), ('last', last_page), + ('first', first_page), ('previous', previous_page)): + if page is None: + continue + + delimiter = '&' if self.request.query_arguments else '?' + link = '<{uri}{delimiter}page={page}&limit={limit}>; rel="{rel}"'.format( + uri=self.request.uri, delimiter=delimiter, page=page, limit=arg_limit, rel=rel) + links.append(link) + + self.set_header('Link', ', '.join(links)) + + return self._ok(data=results, headers=headers) - @staticmethod - def _parse(value, function=int): + @classmethod + def _parse(cls, value, function=int): """Parse value using the specified function. :param value: @@ -129,10 +287,13 @@ def _parse(value, function=int): :return: """ if value is not None: - return function(value) + try: + return function(value) + except ValueError: + cls._raise_bad_request_error('Invalid value {value!r}'.format(value=value)) - @staticmethod - def _parse_boolean(value): + @classmethod + def _parse_boolean(cls, value): """Parse value using the specified function. :param value: @@ -141,17 +302,17 @@ def _parse_boolean(value): if isinstance(value, text_type): return value.lower() == 'true' - return bool(value) + return cls._parse(value, bool) - @staticmethod - def _parse_date(value, fmt='%Y-%m-%d'): + @classmethod + def _parse_date(cls, value, fmt='%Y-%m-%d'): """Parse a date value using the specified format. :param value: :param fmt: :return: """ - return BaseRequestHandler._parse(value, lambda d: datetime.strptime(d, fmt)) + return cls._parse(value, lambda d: datetime.strptime(d, fmt)) class NotFoundHandler(BaseRequestHandler): @@ -161,10 +322,113 @@ def get(self, *args, **kwargs): """Get.""" self.api_finish(status=404) + @classmethod + def create_app_handler(cls, base): + """Capture everything.""" + return r'{base}(/?.*)'.format(base=base), cls -def json_string_encoder(o): + +def json_default_encoder(o): """Convert properties to string.""" if isinstance(o, Language): return getattr(o, 'name') + if isinstance(o, set): + return list(o) + + if isinstance(o, date): + return o.isoformat() + return text_type(o) + + +def iter_nested_items(data, prefix=''): + """Iterate through the dictionary. + + Nested keys are separated with dots. 
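The reworked _paginate advertises navigation through an RFC 5988 style Link header in addition to the X-Pagination-* headers. A small standalone sketch of how those relations can be derived from page, limit and total count (the URI is a made-up example and the logic is simplified):

def build_links(uri, page, limit, count, has_query=False):
    """Derive pagination relations and format an RFC 5988 Link header value."""
    last = ((count - 1) // limit) + 1
    pages = [
        ('next', page + 1 if page * limit < count else None),
        ('last', last if last > page else None),
        ('first', 1),
        ('previous', page - 1 if page > 1 else None),
    ]
    delimiter = '&' if has_query else '?'
    return ', '.join(
        '<{0}{1}page={2}&limit={3}>; rel="{4}"'.format(uri, delimiter, p, limit, rel)
        for rel, p in pages if p is not None)

print(build_links('/api/v2/series', page=2, limit=20, count=65))
# </api/v2/series?page=3&limit=20>; rel="next", </api/v2/series?page=4&limit=20>; rel="last", ...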
+ """ + for key, value in data.items(): + p = prefix + key + if isinstance(value, collections.Mapping): + for inner_key, inner_value in iter_nested_items(value, prefix=p + '.'): + yield inner_key, inner_value + else: + yield p, value + + +def set_nested_value(data, key, value): + """Set nested value to the dictionary.""" + keys = key.split('.') + for k in keys[:-1]: + data = data.setdefault(k, {}) + + data[keys[-1]] = value + + +class PatchField(object): + """Represent a field to be patched.""" + + def __init__(self, target_type, attr, attr_type, + validator=None, converter=None, default_value=None, post_processor=None): + """Constructor.""" + if not hasattr(target_type, attr): + raise ValueError('{0!r} has no attribute {1}'.format(target_type, attr)) + + self.target_type = target_type + self.attr = attr + self.attr_type = attr_type + self.validator = validator or (lambda v: isinstance(v, self.attr_type)) + self.converter = converter or (lambda v: v) + self.default_value = default_value + self.post_processor = post_processor + + def patch(self, target, value): + """Patch the field with the specified value.""" + valid = self.validator(value) + + if not valid and self.default_value is not None: + value = self.default_value + valid = True + + if valid: + setattr(target, self.attr, self.converter(value)) + if self.post_processor: + self.post_processor(value) + return True + + +class StringField(PatchField): + """Patch string fields.""" + + def __init__(self, target_type, attr, validator=None, converter=None, default_value=None, post_processor=None): + """Constructor.""" + super(StringField, self).__init__(target_type, attr, string_types, validator=validator, converter=converter, + default_value=default_value, post_processor=post_processor) + + +class IntegerField(PatchField): + """Patch integer fields.""" + + def __init__(self, target_type, attr, validator=None, converter=None, default_value=None, post_processor=None): + """Constructor.""" + super(IntegerField, self).__init__(target_type, attr, int, validator=validator, converter=converter, + default_value=default_value, post_processor=post_processor) + + +class BooleanField(PatchField): + """Patch boolean fields.""" + + def __init__(self, target_type, attr, validator=None, converter=int, default_value=None, post_processor=None): + """Constructor.""" + super(BooleanField, self).__init__(target_type, attr, bool, validator=validator, converter=converter, + default_value=default_value, post_processor=post_processor) + + +class EnumField(PatchField): + """Patch enumeration fields.""" + + def __init__(self, target_type, attr, enums, attr_type=text_type, + converter=None, default_value=None, post_processor=None): + """Constructor.""" + super(EnumField, self).__init__(target_type, attr, attr_type, validator=lambda v: v in enums, + converter=converter, default_value=default_value, post_processor=post_processor) diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index 8f058bdd39..1ea9f3131f 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -1,298 +1,213 @@ # coding=utf-8 """Request handler for configuration.""" - +import logging import platform import sys + +from medusa import ( + app, + db, +) +from medusa.helper.collections import NonEmptyDict +from medusa.server.api.v2.base import ( + BaseRequestHandler, + BooleanField, + EnumField, + IntegerField, + StringField, + iter_nested_items, + set_nested_value, +) from six import text_type from tornado.escape import json_decode -from .base import 
BaseRequestHandler -from .... import app, db, logger +log = logging.getLogger(__name__) + + +def layout_schedule_post_processor(v): + """Calendar layout should sort by date.""" + if v == 'calendar': + app.COMING_EPS_SORT = 'date' class ConfigHandler(BaseRequestHandler): """Config request handler.""" - def set_default_headers(self): - """Set default CORS headers.""" - super(ConfigHandler, self).set_default_headers() - self.set_header('Access-Control-Allow-Methods', 'GET, PATCH, OPTIONS') + #: resource name + name = 'config' + #: identifier + identifier = ('identifier', r'\w+') + #: path param + path_param = ('path_param', r'\w+') + #: allowed HTTP methods + allowed_methods = ('GET', 'PATCH', ) + #: patch mapping + patches = { + 'anonRedirect': StringField(app, 'ANON_REDIRECT'), + 'emby.enabled': BooleanField(app, 'USE_EMBY'), + 'torrents.enabled': BooleanField(app, 'USE_TORRENTS'), + 'torrents.username': StringField(app, 'TORRENT_USERNAME'), + 'torrents.password': StringField(app, 'TORRENT_PASSWORD'), + 'torrents.label': StringField(app, 'TORRENT_LABEL'), + 'torrents.labelAnime': StringField(app, 'TORRENT_LABEL_ANIME'), + 'torrents.verifySSL': BooleanField(app, 'TORRENT_VERIFY_CERT'), + 'torrents.path': BooleanField(app, 'TORRENT_PATH'), + 'selectedRootIndex': IntegerField(app, 'SELECTED_ROOT'), + 'layout.schedule': EnumField(app, 'COMING_EPS_LAYOUT', ('poster', 'banner', 'list', 'calendar'), + default_value='banner', post_processor=layout_schedule_post_processor), + 'layout.history': EnumField(app, 'HISTORY_LAYOUT', ('compact', 'detailed'), default_value='detailed'), + 'layout.home': EnumField(app, 'HOME_LAYOUT', ('poster', 'small', 'banner', 'simple', 'coverflow'), + default_value='poster'), + 'layout.show.allSeasons': BooleanField(app, 'DISPLAY_ALL_SEASONS'), + 'layout.show.specials': BooleanField(app, 'DISPLAY_SHOW_SPECIALS'), + 'theme.name': StringField(app, 'THEME_NAME'), + 'backlogOverview.period': StringField(app, 'BACKLOG_PERIOD'), + 'backlogOverview.status': StringField(app, 'BACKLOG_STATUS'), + } - def get(self, query=''): + def get(self, identifier, path_param=None): """Query general configuration. 
- :param query: - :type query: str + :param identifier: + :param path_param: + :type path_param: str """ - config_data = { - 'anonRedirect': app.ANON_REDIRECT, - 'animeSplitHome': app.ANIME_SPLIT_HOME, - 'comingEpsSort': app.COMING_EPS_SORT, - 'datePreset': app.DATE_PRESET, - 'fuzzyDating': app.FUZZY_DATING, - 'themeName': app.THEME_NAME, - 'posterSortby': app.POSTER_SORTBY, - 'posterSortdir': app.POSTER_SORTDIR, - 'rootDirs': app.ROOT_DIRS, - 'sortArticle': app.SORT_ARTICLE, - 'timePreset': app.TIME_PRESET, - 'trimZero': app.TRIM_ZERO, - 'fanartBackground': app.FANART_BACKGROUND, - 'fanartBackgroundOpacity': 0 if app.FANART_BACKGROUND_OPACITY is None else float(app.FANART_BACKGROUND_OPACITY), - 'branch': app.BRANCH, - 'commitHash': app.CUR_COMMIT_HASH, - 'release': app.APP_VERSION, - 'sslVersion': app.OPENSSL_VERSION, - 'pythonVersion': sys.version, - 'databaseVersion': { - 'major': app.MAJOR_DB_VERSION, - 'minor': app.MINOR_DB_VERSION - }, - 'os': platform.platform(), - 'locale': '.'.join([text_type(loc or 'Unknown') for loc in app.LOCALE]), - 'localUser': app.OS_USER or 'Unknown', - 'programDir': app.PROG_DIR, - 'configFile': app.CONFIG_FILE, - 'dbFilename': db.dbFilename(), - 'cacheDir': app.CACHE_DIR, - 'logDir': app.LOG_DIR, - 'appArgs': app.MY_ARGS, - 'webRoot': app.WEB_ROOT, - 'githubUrl': app.GITHUB_IO_URL, - 'wikiUrl': app.WIKI_URL, - 'sourceUrl': app.APPLICATION_URL, - 'downloadUrl': app.DOWNLOAD_URL, - 'subtitlesMulti': app.SUBTITLES_MULTI, - 'namingForceFolders': app.NAMING_FORCE_FOLDERS, - 'subtitles': { - 'enabled': bool(app.USE_SUBTITLES) - }, - 'kodi': { - 'enabled': bool(app.USE_KODI and app.KODI_UPDATE_LIBRARY) - }, - 'plex': { - 'server': { - 'enabled': bool(app.USE_PLEX_SERVER), - 'notify': { - 'snatch': bool(app.PLEX_NOTIFY_ONSNATCH), - 'download': bool(app.PLEX_NOTIFY_ONDOWNLOAD), - 'subtitleDownload': bool(app.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD) - }, - 'updateLibrary': bool(app.PLEX_UPDATE_LIBRARY), - 'host': app.PLEX_SERVER_HOST, - 'token': app.PLEX_SERVER_TOKEN, - 'username': app.PLEX_SERVER_USERNAME, - 'password': app.PLEX_SERVER_PASSWORD - }, - 'client': { - 'enabled': bool(app.USE_PLEX_CLIENT), - 'username': app.PLEX_CLIENT_USERNAME, - 'password': app.PLEX_CLIENT_PASSWORD, - 'host': app.PLEX_CLIENT_HOST - } - }, - 'emby': { - 'enabled': bool(app.USE_EMBY) - }, - 'torrents': { - 'enabled': bool(app.USE_TORRENTS), - 'method': app.TORRENT_METHOD, - 'username': app.TORRENT_USERNAME, - 'password': app.TORRENT_PASSWORD, - 'label': app.TORRENT_LABEL, - 'labelAnime': app.TORRENT_LABEL_ANIME, - 'verifySSL': app.TORRENT_VERIFY_CERT, - 'path': app.TORRENT_PATH, - 'seedTime': app.TORRENT_SEED_TIME, - 'paused': app.TORRENT_PAUSED, - 'highBandwidth': app.TORRENT_HIGH_BANDWIDTH, - 'host': app.TORRENT_HOST, - 'rpcurl': app.TORRENT_RPCURL, - 'authType': app.TORRENT_AUTH_TYPE - }, - 'nzb': { - 'enabled': bool(app.USE_NZBS), - 'username': app.NZBGET_USERNAME, - 'password': app.NZBGET_PASSWORD, - # app.NZBGET_CATEGORY - # app.NZBGET_CATEGORY_BACKLOG - # app.NZBGET_CATEGORY_ANIME - # app.NZBGET_CATEGORY_ANIME_BACKLOG - 'host': app.NZBGET_HOST, - 'priority': app.NZBGET_PRIORITY - }, - 'layout': { - 'schedule': app.COMING_EPS_LAYOUT, - 'history': app.HISTORY_LAYOUT, - 'home': app.HOME_LAYOUT, - 'show': { - 'allSeasons': bool(app.DISPLAY_ALL_SEASONS), - 'specials': bool(app.DISPLAY_SHOW_SPECIALS) - } - }, - 'selectedRootIndex': int(app.SELECTED_ROOT) if app.SELECTED_ROOT else None, - 'backlogOverview': { - 'period': app.BACKLOG_PERIOD, - 'status': app.BACKLOG_STATUS - } - } + if 
identifier and identifier != 'main': + return self._not_found('Config not found') + + config_data = NonEmptyDict() + config_data['anonRedirect'] = app.ANON_REDIRECT + config_data['animeSplitHome'] = app.ANIME_SPLIT_HOME + config_data['comingEpsSort'] = app.COMING_EPS_SORT + config_data['datePreset'] = app.DATE_PRESET + config_data['fuzzyDating'] = app.FUZZY_DATING + config_data['themeName'] = app.THEME_NAME + config_data['posterSortby'] = app.POSTER_SORTBY + config_data['posterSortdir'] = app.POSTER_SORTDIR + config_data['rootDirs'] = app.ROOT_DIRS + config_data['sortArticle'] = app.SORT_ARTICLE + config_data['timePreset'] = app.TIME_PRESET + config_data['trimZero'] = app.TRIM_ZERO + config_data['fanartBackground'] = app.FANART_BACKGROUND + config_data['fanartBackgroundOpacity'] = float(app.FANART_BACKGROUND_OPACITY or 0) + config_data['branch'] = app.BRANCH + config_data['commitHash'] = app.CUR_COMMIT_HASH + config_data['release'] = app.APP_VERSION + config_data['sslVersion'] = app.OPENSSL_VERSION + config_data['pythonVersion'] = sys.version + config_data['databaseVersion'] = NonEmptyDict() + config_data['databaseVersion']['major'] = app.MAJOR_DB_VERSION + config_data['databaseVersion']['minor'] = app.MINOR_DB_VERSION + config_data['os'] = platform.platform() + config_data['locale'] = '.'.join([text_type(loc or 'Unknown') for loc in app.LOCALE]) + config_data['localUser'] = app.OS_USER or 'Unknown' + config_data['programDir'] = app.PROG_DIR + config_data['configFile'] = app.CONFIG_FILE + config_data['dbFilename'] = db.dbFilename() + config_data['cacheDir'] = app.CACHE_DIR + config_data['logDir'] = app.LOG_DIR + config_data['appArgs'] = app.MY_ARGS + config_data['webRoot'] = app.WEB_ROOT + config_data['githubUrl'] = app.GITHUB_IO_URL + config_data['wikiUrl'] = app.WIKI_URL + config_data['sourceUrl'] = app.APPLICATION_URL + config_data['downloadUrl'] = app.DOWNLOAD_URL + config_data['subtitlesMulti'] = app.SUBTITLES_MULTI + config_data['namingForceFolders'] = app.NAMING_FORCE_FOLDERS + config_data['subtitles'] = NonEmptyDict() + config_data['subtitles']['enabled'] = bool(app.USE_SUBTITLES) + config_data['kodi'] = NonEmptyDict() + config_data['kodi']['enabled'] = bool(app.USE_KODI and app.KODI_UPDATE_LIBRARY) + config_data['plex'] = NonEmptyDict() + config_data['plex']['server'] = NonEmptyDict() + config_data['plex']['server']['enabled'] = bool(app.USE_PLEX_SERVER) + config_data['plex']['server']['notify'] = NonEmptyDict() + config_data['plex']['server']['notify']['snatch'] = bool(app.PLEX_NOTIFY_ONSNATCH) + config_data['plex']['server']['notify']['download'] = bool(app.PLEX_NOTIFY_ONDOWNLOAD) + config_data['plex']['server']['notify']['subtitleDownload'] = bool(app.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD) + + config_data['plex']['server']['updateLibrary'] = bool(app.PLEX_UPDATE_LIBRARY) + config_data['plex']['server']['host'] = app.PLEX_SERVER_HOST + config_data['plex']['server']['token'] = app.PLEX_SERVER_TOKEN + config_data['plex']['server']['username'] = app.PLEX_SERVER_USERNAME + config_data['plex']['server']['password'] = app.PLEX_SERVER_PASSWORD + config_data['plex']['client'] = NonEmptyDict() + config_data['plex']['client']['enabled'] = bool(app.USE_PLEX_CLIENT) + config_data['plex']['client']['username'] = app.PLEX_CLIENT_USERNAME + config_data['plex']['client']['password'] = app.PLEX_CLIENT_PASSWORD + config_data['plex']['client']['host'] = app.PLEX_CLIENT_HOST + config_data['emby'] = NonEmptyDict() + config_data['emby']['enabled'] = bool(app.USE_EMBY) + config_data['torrents'] = 
NonEmptyDict() + config_data['torrents']['enabled'] = bool(app.USE_TORRENTS) + config_data['torrents']['method'] = app.TORRENT_METHOD + config_data['torrents']['username'] = app.TORRENT_USERNAME + config_data['torrents']['password'] = app.TORRENT_PASSWORD + config_data['torrents']['label'] = app.TORRENT_LABEL + config_data['torrents']['labelAnime'] = app.TORRENT_LABEL_ANIME + config_data['torrents']['verifySSL'] = app.TORRENT_VERIFY_CERT + config_data['torrents']['path'] = app.TORRENT_PATH + config_data['torrents']['seedTime'] = app.TORRENT_SEED_TIME + config_data['torrents']['paused'] = app.TORRENT_PAUSED + config_data['torrents']['highBandwidth'] = app.TORRENT_HIGH_BANDWIDTH + config_data['torrents']['host'] = app.TORRENT_HOST + config_data['torrents']['rpcurl'] = app.TORRENT_RPCURL + config_data['torrents']['authType'] = app.TORRENT_AUTH_TYPE + config_data['nzb'] = NonEmptyDict() + config_data['nzb']['enabled'] = bool(app.USE_NZBS) + config_data['nzb']['username'] = app.NZBGET_USERNAME + config_data['nzb']['password'] = app.NZBGET_PASSWORD + # app.NZBGET_CATEGORY + # app.NZBGET_CATEGORY_BACKLOG + # app.NZBGET_CATEGORY_ANIME + # app.NZBGET_CATEGORY_ANIME_BACKLOG + config_data['nzb']['host'] = app.NZBGET_HOST + config_data['nzb']['priority'] = app.NZBGET_PRIORITY + config_data['layout'] = NonEmptyDict() + config_data['layout']['schedule'] = app.COMING_EPS_LAYOUT + config_data['layout']['history'] = app.HISTORY_LAYOUT + config_data['layout']['home'] = app.HOME_LAYOUT + config_data['layout']['show'] = NonEmptyDict() + config_data['layout']['show']['allSeasons'] = bool(app.DISPLAY_ALL_SEASONS) + config_data['layout']['show']['specials'] = bool(app.DISPLAY_SHOW_SPECIALS) + config_data['selectedRootIndex'] = int(app.SELECTED_ROOT) if app.SELECTED_ROOT else None + config_data['backlogOverview'] = NonEmptyDict() + config_data['backlogOverview']['period'] = app.BACKLOG_PERIOD + config_data['backlogOverview']['status'] = app.BACKLOG_STATUS + + if not identifier: + return self._paginate([config_data]) - if query and query not in config_data: - return self.api_finish(status=404, error='{key} not found'.format(key=query)) + if path_param: + if path_param not in config_data: + return self._bad_request('{key} is a invalid path'.format(key=path_param)) - self.api_finish(data=config_data[query] if query else config_data) + config_data = config_data[path_param] - def patch(self, *args, **kwargs): + return self._ok(data=config_data) + + def patch(self, identifier, *args, **kwargs): """Patch general configuration.""" + if not identifier: + return self._bad_request('Config identifier not specified') + + if identifier != 'main': + return self._not_found('Config not found') + data = json_decode(self.request.body) - done_data = {} - done_errors = [] - for key in data.keys(): - if key == 'anonRedirect': - app.ANON_REDIRECT = data['anonRedirect'] - done_data.setdefault('anonRedirect', app.ANON_REDIRECT) - # 'animeSplitHome': app.ANIME_SPLIT_HOME, - # 'comingEpsSort': app.COMING_EPS_SORT, - # 'datePreset': app.DATE_PRESET, - # 'fuzzyDating': app.FUZZY_DATING, - # 'themeName': app.THEME_NAME, - # 'posterSortby': app.POSTER_SORTBY, - # 'posterSortdir': app.POSTER_SORTDIR, - # 'rootDirs': app.ROOT_DIRS, - # 'sortArticle': app.SORT_ARTICLE, - # 'timePreset': app.TIME_PRESET, - # 'trimZero': app.TRIM_ZERO, - # 'fanartBackground': app.FANART_BACKGROUND, - # 'fanartBackgroundOpacity': app.FANART_BACKGROUND_OPACITY, - # 'branch': app.BRANCH, # @TODO: If branch change we should checkout new branch and if success return 200 
otherwise return error - if key in ['commitHash', 'release', 'sslVersion', 'pythonVersion', 'databaseVersion', 'os', 'locale', 'localUser', ]: - # This is for fields that are static within the API - # For example you shouldn't be able to change the OS - done_errors.append(key) - # 'programDir': app.PROG_DIR, - # 'configFile': app.CONFIG_FILE, - # 'dbFilename': db.dbFilename(), - # 'cacheDir': app.CACHE_DIR, - # 'logDir': app.LOG_DIR, - # 'appArgs': app.MY_ARGS, - # 'webRoot': app.WEB_ROOT, - # 'githubUrl': app.GITHUB_IO_URL, - # 'wikiUrl': app.WIKI_URL, - # 'sourceUrl': app.APPLICATION_URL, - # 'downloadUrl': app.DOWNLOAD_URL, - # 'subtitlesMulti': app.SUBTITLES_MULTI, - # 'namingForceFolders': app.NAMING_FORCE_FOLDERS, - # 'subtitles': { - # 'enabled': bool(app.USE_SUBTITLES) - # }, - # 'kodi': { - # 'enabled': bool(app.USE_KODI and app.KODI_UPDATE_LIBRARY) - # }, - # 'plex': { - # 'server': { - # 'enabled': bool(app.USE_PLEX_SERVER and app.PLEX_UPDATE_LIBRARY) - # }, - # 'client': { - # 'enabled': False # Replace this with plex client code - # } - # }, - if key == 'emby': - done_data.setdefault('emby', {}) - if 'enabled' in data['emby'] and str(data['emby']['enabled']).lower() in ['true', 'false']: - # @TODO: All booleans should be saved as booleans - app.USE_EMBY = int(data['emby']['enabled']) - done_data['emby'].setdefault('enabled', bool(app.USE_EMBY)) - if key == 'torrents': - done_data.setdefault('torrents', {}) - if 'enabled' in data['torrents'] and str(data['torrents']['enabled']).lower() in ['true', 'false']: - # @TODO: All booleans should be saved as booleans - app.USE_TORRENTS = int(data['torrents']['enabled']) - done_data['torrents'].setdefault('enabled', bool(app.USE_TORRENTS)) - if 'username' in data['torrents']: - app.TORRENT_USERNAME = str(data['torrents']['username']) - done_data['torrents'].setdefault('username', app.TORRENT_USERNAME) - if 'password' in data['torrents']: - app.TORRENT_PASSWORD = str(data['torrents']['password']) - done_data['torrents'].setdefault('password', app.TORRENT_PASSWORD) - if 'label' in data['torrents']: - app.TORRENT_LABEL = str(data['torrents']['label']) - done_data['torrents'].setdefault('label', app.TORRENT_LABEL) - if 'labelAnime' in data['torrents']: - app.TORRENT_LABEL_ANIME = str(data['torrents']['labelAnime']) - done_data['torrents'].setdefault('labelAnime', app.TORRENT_LABEL_ANIME) - if 'verifySSL' in data['torrents'] and str(data['torrents']['verifySSL']).lower() in ['true', 'false']: - # @TODO: All booleans should be saved as booleans - app.TORRENT_VERIFY_CERT = int(data['torrents']['verifySSL']) - done_data['torrents'].setdefault('verifySSL', bool(app.TORRENT_VERIFY_CERT)) - if 'path' in data['torrents']: - app.TORRENT_PATH = str(data['torrents']['path']) - done_data['torrents'].setdefault('verifySSL', app.TORRENT_VERIFY_CERT) - # 'path': app.TORRENT_PATH, - # 'seedTime': app.TORRENT_SEED_TIME, - # 'paused': app.TORRENT_PAUSED, - # 'highBandwidth': app.TORRENT_HIGH_BANDWIDTH, - # 'host': app.TORRENT_HOST, - # 'rpcurl': app.TORRENT_RPCURL, - # 'authType': app.TORRENT_AUTH_TYPE - # if 'method' in data['torrents']: - # if 'username' in data['torrents']: - # if 'password' in data['torrents']: - # if 'label' in data['torrents']: - # if 'labelAnime' in data['torrents']: - # if 'verifySSL' in data['torrents']: - # if 'seedTime' in data['torrents']: - # if 'highBandwidth' in data['torrents']: - # if 'host' in data['torrents']: - # if 'rpcurl' in data['torrents']: - # if 'authType' in data['torrents']: - if key == 'selectedRootIndex': - 
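The long if/elif chain being removed here is what the new declarative approach in base.py replaces: the PATCH body is flattened into dotted keys with iter_nested_items, each key is looked up in the handler's patches mapping, and the matching field validates, converts and assigns the value. A simplified stand-in of that flow (the Settings object and the tuple-based mapping below are illustrative, not Medusa's real app object or PatchField classes):

class Settings(object):
    USE_TORRENTS = False
    TORRENT_USERNAME = ''

def iter_nested_items(data, prefix=''):
    """Yield ('a.b.c', value) pairs from a nested dict."""
    for key, value in data.items():
        if isinstance(value, dict):
            for item in iter_nested_items(value, prefix=prefix + key + '.'):
                yield item
        else:
            yield prefix + key, value

# dotted key -> (target attribute, expected type, converter)
patches = {
    'torrents.enabled': ('USE_TORRENTS', bool, int),          # booleans stored as ints
    'torrents.username': ('TORRENT_USERNAME', str, lambda v: v),
}

body = {'torrents': {'enabled': True, 'username': 'admin'}, 'os': 'Linux'}
accepted, ignored = {}, {}
for key, value in iter_nested_items(body):
    field = patches.get(key)
    if field and isinstance(value, field[1]):
        setattr(Settings, field[0], field[2](value))
        accepted[key] = value
    else:
        ignored[key] = value  # unknown or read-only keys such as 'os' are reported, not applied

print(accepted)  # the two torrents.* keys
print(ignored)   # {'os': 'Linux'}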
root_id = int(data['selectedRootIndex']) - app.SELECTED_ROOT = root_id - done_data['selectedRootIndex'] = root_id - if key == 'layout': - done_data.setdefault('layout', {}) - if 'schedule' in data['layout']: - if data['layout']['schedule'] in ('poster', 'banner', 'list', 'calendar'): - if data['layout']['schedule'] == 'calendar': - app.COMING_EPS_SORT = 'date' - app.COMING_EPS_LAYOUT = data['layout']['schedule'] - else: - app.COMING_EPS_LAYOUT = 'banner' - done_data['layout'].setdefault('schedule', app.COMING_EPS_LAYOUT) - if 'history' in data['layout']: - if data['layout']['history'] in ('compact', 'detailed'): - app.HISTORY_LAYOUT = data['layout']['history'] - else: - app.HISTORY_LAYOUT = 'detailed' - done_data['layout'].setdefault('history', app.HISTORY_LAYOUT) - if 'home' in data['layout']: - if data['layout']['home'] in ('poster', 'small', 'banner', 'simple', 'coverflow'): - app.HOME_LAYOUT = data['layout']['home'] - else: - app.HOME_LAYOUT = 'poster' - done_data['layout'].setdefault('home', app.HOME_LAYOUT) - if 'show' in data['layout']: - done_data['layout'].setdefault('show', {}) - if 'allSeasons' in data['layout']['show'] and str(data['layout']['show']['allSeasons']).lower() in ['true', 'false']: - app.DISPLAY_ALL_SEASONS = int(data['layout']['show']['allSeasons']) - done_data['layout']['show'].setdefault('allSeasons', bool(app.DISPLAY_ALL_SEASONS)) - if 'specials' in data['layout']['show'] and str(data['layout']['show']['specials']).lower() in ['true', 'false']: - app.DISPLAY_SHOW_SPECIALS = int(data['layout']['show']['specials']) - done_data['layout']['show'].setdefault('specials', bool(app.DISPLAY_SHOW_SPECIALS)) - if key == 'theme': - theme_name = data['theme']['name'] - app.THEME_NAME = theme_name - done_data['themeName'] = theme_name - if key == 'backlogOverview': - done_data.setdefault('backlogOverview', {}) - if 'period' in data['backlogOverview']: - app.BACKLOG_PERIOD = data['backlogOverview']['period'] - done_data['backlogOverview'].setdefault('period', app.BACKLOG_PERIOD) - if 'status' in data['backlogOverview']: - app.BACKLOG_STATUS = data['backlogOverview']['status'] - done_data['backlogOverview'].setdefault('status', app.BACKLOG_STATUS) + accepted = {} + ignored = {} + + for key, value in iter_nested_items(data): + patch_field = self.patches.get(key) + if patch_field and patch_field.patch(app, value): + set_nested_value(accepted, key, value) + else: + set_nested_value(ignored, key, value) + + if ignored: + log.warning('Config patch ignored %r', ignored) + # Make sure to update the config file after everything is updated app.instance.save_config() - if len(done_errors): - logger.log('Can\'t PATCH [' + ', '.join(done_errors) + '] since ' + ["it's a static field.", "they're static fields."][len(done_errors) > 1]) - self.api_finish(data=done_data) + self._ok(data=accepted) diff --git a/medusa/server/api/v2/episode.py b/medusa/server/api/v2/episode.py new file mode 100644 index 0000000000..2334ef4a04 --- /dev/null +++ b/medusa/server/api/v2/episode.py @@ -0,0 +1,63 @@ +# coding=utf-8 +"""Request handler for series and episodes.""" + +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.server.api.v2.series import SeriesHandler +from medusa.tv.episode import Episode, EpisodeNumber +from medusa.tv.series import Series, SeriesIdentifier + + +class EpisodeHandler(BaseRequestHandler): + """Episodes request handler.""" + + #: parent resource handler + parent_handler = SeriesHandler + #: resource name + name = 'episode' + #: identifier + identifier = 
('episode_slug', r'[\w-]+') + #: path param + path_param = ('path_param', r'\w+') + #: allowed HTTP methods + allowed_methods = ('GET', ) + + def get(self, series_slug, episode_slug, path_param): + """Query episode information. + + :param series_slug: series slug. E.g.: tvdb1234 + :param episode_number: + :param path_param: + """ + series_identifier = SeriesIdentifier.from_slug(series_slug) + if not series_identifier: + return self._bad_request('Invalid series slug') + + series = Series.find_by_identifier(series_identifier) + if not series: + return self._not_found('Series not found') + + if not episode_slug: + detailed = self._parse_boolean(self.get_argument('detailed', default=False)) + season = self._parse(self.get_argument('season', None), int) + data = [e.to_json(detailed=detailed) for e in series.get_all_episodes(season=season)] + return self._paginate(data, sort='airDate') + + episode_number = EpisodeNumber.from_slug(episode_slug) + if not episode_number: + return self._bad_request('Invalid episode number') + + episode = Episode.find_by_series_and_episode(series, episode_number) + if not episode: + return self._not_found('Episode not found') + + detailed = self._parse_boolean(self.get_argument('detailed', default=True)) + data = episode.to_json(detailed=detailed) + if path_param: + if path_param == 'metadata': + data = episode.metadata() if episode.is_location_valid() else {} + elif path_param in data: + data = data[path_param] + else: + return self._bad_request("Invalid path parameter'{0}'".format(path_param)) + + return self._ok(data=data) diff --git a/medusa/server/api/v2/log.py b/medusa/server/api/v2/log.py index a4ec38018f..c2bab136e3 100644 --- a/medusa/server/api/v2/log.py +++ b/medusa/server/api/v2/log.py @@ -3,54 +3,59 @@ import json import logging -from .base import BaseRequestHandler -from ....logger import LOGGING_LEVELS, filter_logline, read_loglines +from medusa.logger import LOGGING_LEVELS, filter_logline, read_loglines +from medusa.logger.adapters.style import BraceAdapter +from medusa.server.api.v2.base import BaseRequestHandler -logger = logging.getLogger(__name__) +log = BraceAdapter(logging.getLogger(__name__)) class LogHandler(BaseRequestHandler): """Log request handler.""" - def get(self, log_level): - """Query logs. 
+ #: resource name + name = 'log' + #: identifier + identifier = None + #: allowed HTTP methods + allowed_methods = ('GET', 'POST', ) + + def get(self): + """Query logs.""" + log_level = self.get_argument('level', 'INFO').upper() + if log_level not in LOGGING_LEVELS: + return self._bad_request('Invalid log level') - :param log_level: - :type log_level: str - """ - log_level = log_level or 'INFO' arg_page = self._get_page() arg_limit = self._get_limit() - min_level = LOGGING_LEVELS[log_level.upper()] + min_level = LOGGING_LEVELS[log_level] - data = [line.to_json() for line in read_loglines(max_lines=arg_limit + arg_page, - predicate=lambda l: filter_logline(l, min_level=min_level))] - start = (arg_page - 1) * arg_limit - end = start + arg_limit - data = data[start:end] + def data_generator(): + """Read log lines based on the specified criteria.""" + start = arg_limit * (arg_page - 1) + 1 + for l in read_loglines(start_index=start, max_lines=arg_limit * arg_page, + predicate=lambda li: filter_logline(li, min_level=min_level)): + yield l.to_json() - self.api_finish(data=data, headers={ - 'X-Pagination-Page': arg_page, - 'X-Pagination-Limit': arg_limit - }) + return self._paginate(data_generator=data_generator) - def delete(self, log_level='ERROR'): - """Delete logs. - - :param log_level: - """ - self.api_finish() - - def post(self, log_level): + def post(self): """Create a log line. By definition this method is NOT idempotent. """ data = json.loads(self.request.body) + if not data or not all([data.get('message')]): + return self._bad_request('Invalid request') + + data['level'] = data.get('level', 'INFO').upper() + if data['level'] not in LOGGING_LEVELS: + return self._bad_request('Invalid log level') + message = data['message'] args = data.get('args', []) - kwargs = data.get('kwargs', dict()) - level = LOGGING_LEVELS[data.get('level', 'ERROR').upper()] - logger.log(level, message, exc_info=False, *args, **kwargs) - self.api_finish(status=201) + kwargs = data.get('kwargs', {}) + level = LOGGING_LEVELS[data['level']] + log.log(level, message, exc_info=False, *args, **kwargs) + self._created() diff --git a/medusa/server/api/v2/scene_exception.py b/medusa/server/api/v2/scene_exception.py deleted file mode 100644 index afb267baf3..0000000000 --- a/medusa/server/api/v2/scene_exception.py +++ /dev/null @@ -1,221 +0,0 @@ -# coding=utf-8 -"""Request handler for scene_exceptions.""" - -from medusa.indexers.indexer_config import indexer_id_to_slug, slug_to_indexer_id -from medusa.scene_exceptions import get_last_refresh, retrieve_exceptions -from tornado.escape import json_decode -from .base import BaseRequestHandler -from .... 
import db - - -def get_last_updates(): - """Query the cache table for the last update for every scene exception source.""" - last_updates = {} - for scene_exception_source in ['custom_exceptions', 'xem', 'anidb']: - last_updates[scene_exception_source] = get_last_refresh(scene_exception_source)[0]['last_refreshed'] - return last_updates - - -class SceneExceptionTypeHandler(BaseRequestHandler): - """Scene Exception type request handler.""" - - def set_default_headers(self): - """Set default CORS headers.""" - super(SceneExceptionTypeHandler, self).set_default_headers() - self.set_header('Access-Control-Allow-Methods', 'GET') - - def get(self, exception_type): - """Return a list or instance of exception types.""" - exception_types = [] - if exception_type is not None and exception_type not in ['medusa', 'xem', 'anidb']: - return self.api_finish(status=400) - - if exception_type: - mapped_exception_type = {'medusa': 'custom_exceptions'}.get(exception_type, exception_type) - for k, v in get_last_updates().items(): - if k == mapped_exception_type: - exception_types = {"id": {'custom_exceptions': 'medusa'}.get(k, k), "lastUpdate": v} - else: - exception_types = [{"id": {'custom_exceptions': 'medusa'}.get(k, k), "lastUpdate": v} - for k, v in get_last_updates().items()] - - self.api_finish(data=exception_types) - - -class SceneExceptionAllTypeOperationHandler(BaseRequestHandler): - """Scene Exception operation request handler, to update all scene exception types.""" - - def set_default_headers(self): - """Set default CORS headers.""" - super(SceneExceptionAllTypeOperationHandler, self).set_default_headers() - self.set_header('Access-Control-Allow-Methods', 'POST') - - def post(self): - """Start scene exceptions type operation. To refresh all scene exception types.""" - json_body = json_decode(self.request.body) - - if json_body.get('type', '') == 'REFRESH': - retrieve_exceptions(force=True, exception_type=None) - return self.api_finish(status=201) - return self.api_finish(status=400) - - -class SceneExceptionTypeOperationHandler(BaseRequestHandler): - """Scene Exception operation request handler.""" - - def set_default_headers(self): - """Set default CORS headers.""" - super(SceneExceptionTypeOperationHandler, self).set_default_headers() - self.set_header('Access-Control-Allow-Methods', 'POST') - - def post(self, exception_type): - """Start scene exceptions type operation.""" - if not exception_type or exception_type not in ['medusa', 'xem', 'anidb']: - return self.api_finish(status=400) - - exception_type = {'medusa': 'custom_exceptions'}.get(exception_type, exception_type) - - json_body = json_decode(self.request.body) - - if json_body.get('type', '') == 'REFRESH': - retrieve_exceptions(force=True, exception_type=exception_type) - return self.api_finish(status=201) - return self.api_finish(status=400) - - -class SceneExceptionHandler(BaseRequestHandler): - """Scene Exception request handler.""" - - def set_default_headers(self): - """Set default CORS headers.""" - super(SceneExceptionHandler, self).set_default_headers() - self.set_header('Access-Control-Allow-Methods', 'GET, OPTIONS, PUT, POST') - - def get(self, exception_id, **kwargs): - """Query scene_exception information. - - :param show_indexer_id: The indexer id. Like 1 for tmdb and 3 for tvmaze. 
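These per-type scene-exception endpoints are superseded by the alias-source handlers added above, where a refresh is triggered by POSTing an operation to the source. A hypothetical client call against a local instance (host, port and API key are placeholders, not values from this patch):

import json
import requests  # assumption: requests is available on the client side

url = 'http://localhost:8081/api/v2/alias-source/xem/operation'  # 'local', 'anidb' or 'all' also work
response = requests.post(
    url,
    data=json.dumps({'type': 'REFRESH'}),
    headers={'Content-Type': 'application/json',
             'X-Api-Key': 'your-api-key'})  # placeholder credentials

print(response.status_code)  # 201 when the refresh was started
print(response.json())       # echoes the body plus a 'creation' timestamp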
- :param show_id: - :type show_indexer_id: str - :param season: - """ - slug = self.get_query_argument('indexer', None) - indexer, indexer_id = slug_to_indexer_id(slug) - season = self.get_query_argument('season', None) - exception_type = bool(self.get_query_argument('type', None) == 'custom') - - cache_db_con = db.DBConnection('cache.db') - sql_base = b'SELECT * FROM scene_exceptions' - sql_where = [] - params = [] - - if exception_id: - sql_where.append(b'exception_id') - params += [exception_id] - - if indexer and indexer_id: - sql_where.append(b'indexer') - params += [indexer] - - if indexer_id: - sql_where.append(b'indexer_id') - params += [indexer_id] - - if season: - sql_where.append(b'season') - params += [season] - - if exception_type: - sql_where.append(b'custom') - params += [exception_type] - - if sql_where: - sql_base += b' WHERE ' + b' AND '.join([where + b' = ? ' for where in sql_where]) - - exceptions = cache_db_con.select(sql_base, params) - - exceptions = [{'id': row[0], - 'indexer': indexer_id_to_slug(row[1], row[2]), - 'showName': row[3], - 'season': row[4] if row[4] >= 0 else None, - 'type': 'custom' if row[5] else None} - for row in exceptions] - - if exception_id: - return self.api_finish(data=exceptions[0]) if exceptions else self.api_finish(status=404) - return self._paginate(exceptions, 'id') - - def put(self, *args, **kwargs): - """Update show information. - - :param show_id: - :type show_id: str - """ - exception_id = self._parse(kwargs.pop('row_id')) - - data = json_decode(self.request.body) - - if not all([data.get('indexerId'), - data.get('season'), - data.get('showName'), - data.get('indexer')]): - return self.api_finish(status=400, error="Invalid post body, can't update") - - cache_db_con = db.DBConnection('cache.db') - last_changes = cache_db_con.connection.total_changes - cache_db_con.action(b'UPDATE scene_exceptions' - b' set indexer = ?' - b', indexer_id = ?' - b', show_name = ?' - b', season = ?' - b', custom = 1' - b' WHERE exception_id = ?', - [data.get('indexer'), - data.get('indexerId'), - data.get('showName'), - data.get('season'), - exception_id]) - if cache_db_con.connection.total_changes - last_changes == 1: - return self.api_finish(status=204) - return self.api_finish(status=404, error="Could not update resource.") - - def post(self, *args, **kwargs): - """Add a show.""" - data = json_decode(self.request.body) - - if not all([data.get('indexerId'), - data.get('season'), - data.get('showName'), - data.get('indexer')]): - return self.api_finish(status=400, error="Invalid post body, can't update") - - cache_db_con = db.DBConnection('cache.db') - last_changes = cache_db_con.connection.total_changes - cache_db_con.action(b'INSERT INTO scene_exceptions' - b' (indexer, indexer_id, show_name, season, custom) ' - b' values (?,?,?,?,1)', - [data.get('indexer'), - data.get('indexerId'), - data.get('showName'), - data.get('season')]) - if cache_db_con.connection.total_changes - last_changes > 0: - return self.api_finish(status=200, data={"indexer": data.get('indexer'), - "indexerId": data.get('indexerId'), - "showName": data.get('showName'), - "season": data.get('season')}) - return self.api_finish(status=400, error="Could not update.") - - def delete(self, *args, **kwargs): - """Delete a show. 
- - :param exception_id: - :type exception_id: str - """ - exception_id = self._parse(kwargs.pop('row_id')) - - cache_db_con = db.DBConnection('cache.db') - last_changes = cache_db_con.connection.total_changes - cache_db_con.action(b'DELETE FROM scene_exceptions WHERE exception_id = ?', [exception_id]) - if cache_db_con.connection.total_changes - last_changes > 0: - return self.api_finish(status=204) - return self.api_finish(status=404, error="Resource not found, Failed to delete.") diff --git a/medusa/server/api/v2/series.py b/medusa/server/api/v2/series.py new file mode 100644 index 0000000000..851ed265aa --- /dev/null +++ b/medusa/server/api/v2/series.py @@ -0,0 +1,129 @@ +# coding=utf-8 +"""Request handler for series and episodes.""" + +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.tv.series import Series, SeriesIdentifier +from tornado.escape import json_decode + + +class SeriesHandler(BaseRequestHandler): + """Series request handler.""" + + #: resource name + name = 'series' + #: identifier + identifier = ('series_slug', r'\w+') + #: path param + path_param = ('path_param', r'\w+') + #: allowed HTTP methods + allowed_methods = ('GET', 'PATCH', 'DELETE', ) + + def get(self, series_slug, path_param=None): + """Query series information. + + :param series_slug: series slug. E.g.: tvdb1234 + :param path_param: + """ + arg_paused = self._parse_boolean(self.get_argument('paused', default=None)) + + def filter_series(current): + return arg_paused is None or current.paused == arg_paused + + if not series_slug: + detailed = self._parse_boolean(self.get_argument('detailed', default=False)) + data = [s.to_json(detailed=detailed) for s in Series.find_series(predicate=filter_series)] + return self._paginate(data, sort='title') + + identifier = SeriesIdentifier.from_slug(series_slug) + if not identifier: + return self._bad_request('Invalid series slug') + + series = Series.find_by_identifier(identifier, predicate=filter_series) + if not series: + return self._not_found('Series not found') + + detailed = self._parse_boolean(self.get_argument('detailed', default=True)) + data = series.to_json(detailed=detailed) + if path_param: + if path_param not in data: + return self._bad_request("Invalid path parameter'{0}'".format(path_param)) + data = data[path_param] + + return self._ok(data) + + def post(self, series_slug=None, path_param=None): + """Add a new series.""" + if series_slug is not None: + return self._bad_request('Series slug should not be specified') + + data = json_decode(self.request.body) + if not data or 'id' not in data: + return self._bad_request('Invalid series data') + + ids = {k: v for k, v in data['id'].items() if k != 'imdb'} + if len(ids) != 1: + return self._bad_request('Only 1 indexer identifier should be specified') + + identifier = SeriesIdentifier.from_slug('{slug}{id}'.format(slug=ids.keys()[0], id=ids.values()[0])) + if not identifier: + return self._bad_request('Invalid series identifier') + + series = Series.find_by_identifier(identifier) + if series: + return self._conflict('Series already exist added') + + series = Series.from_identifier(identifier) + if not Series.save_series(series): + return self._not_found('Series not found in the specified indexer') + + return self._created(series.to_json(), identifier=identifier.slug) + + def patch(self, series_slug, path_param=None): + """Patch series.""" + if not series_slug: + return self._method_not_allowed('Patching multiple series are not allowed') + + identifier = 
SeriesIdentifier.from_slug(series_slug) + if not identifier: + return self._bad_request('Invalid series identifier') + + series = Series.find_by_identifier(identifier) + if not series: + return self._not_found('Series not found') + + data = json_decode(self.request.body) + indexer_id = data.get('id', {}).get(identifier.indexer.slug) + if indexer_id is not None and indexer_id != identifier.id: + return self._bad_request('Conflicting series identifier') + + done = {} + for key, value in data.items(): + if key == 'pause': + if value is True: + series.pause() + elif value is False: + series.unpause() + else: + return self._bad_request('Invalid request body: pause') + done[key] = value + + return self._ok(done) + + def delete(self, series_slug, path_param=None): + """Delete the series.""" + if not series_slug: + return self._method_not_allowed('Deleting multiple series are not allowed') + + identifier = SeriesIdentifier.from_slug(series_slug) + if not identifier: + return self._bad_request('Invalid series identifier') + + series = Series.find_by_identifier(identifier) + if not series: + return self._not_found('Series not found') + + remove_files = self._parse_boolean(self.get_argument('remove-files', default=None)) + if not series.delete(remove_files): + return self._conflict('Unable to delete series') + + return self._no_content() diff --git a/medusa/server/api/v2/series_asset.py b/medusa/server/api/v2/series_asset.py new file mode 100644 index 0000000000..e397bc2642 --- /dev/null +++ b/medusa/server/api/v2/series_asset.py @@ -0,0 +1,36 @@ +# coding=utf-8 +"""Request handler for series assets.""" + +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.server.api.v2.series import SeriesHandler +from medusa.tv.series import Series, SeriesIdentifier + + +class SeriesAssetHandler(BaseRequestHandler): + """Series Asset request handler.""" + + #: parent resource handler + parent_handler = SeriesHandler + #: resource name + name = 'asset' + #: identifier + identifier = ('identifier', r'[a-zA-Z]+') + #: allowed HTTP methods + allowed_methods = ('GET', ) + + def get(self, series_slug, identifier, *args, **kwargs): + """Get an asset.""" + series_identifier = SeriesIdentifier.from_slug(series_slug) + if not series_identifier: + return self._bad_request('Invalid series slug') + + series = Series.find_by_identifier(series_identifier) + if not series: + return self._not_found('Series not found') + + asset_type = identifier or 'banner' + asset = series.get_asset(asset_type) + if not asset: + return self._not_found('Asset not found') + + self._ok(stream=asset.get_media(), content_type=asset.get_media_type()) diff --git a/medusa/server/api/v2/series_legacy.py b/medusa/server/api/v2/series_legacy.py new file mode 100644 index 0000000000..83106e749d --- /dev/null +++ b/medusa/server/api/v2/series_legacy.py @@ -0,0 +1,48 @@ +# coding=utf-8 +"""Request handler for series operations.""" + +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.server.api.v2.series import SeriesHandler +from medusa.tv.series import Series, SeriesIdentifier + + +class SeriesLegacyHandler(BaseRequestHandler): + """To be removed/redesigned.""" + + #: parent resource handler + parent_handler = SeriesHandler + #: resource name + name = 'legacy' + #: identifier + identifier = ('identifier', r'\w+') + #: path param + path_param = None + #: allowed HTTP methods + allowed_methods = ('GET', ) + + def get(self, series_slug, identifier): + """Query series information. + + :param series_slug: series slug. 
E.g.: tvdb1234 + :param identifier: + """ + series_identifier = SeriesIdentifier.from_slug(series_slug) + if not series_identifier: + return self._bad_request('Invalid series slug') + + series = Series.find_by_identifier(series_identifier) + if not series: + return self._not_found('Series not found') + + if identifier == 'backlogged': + # TODO: revisit + allowed_qualities = self._parse(self.get_argument('allowed', default=None), str) + allowed_qualities = map(int, allowed_qualities.split(',')) if allowed_qualities else [] + preferred_qualities = self._parse(self.get_argument('preferred', default=None), str) + preferred_qualities = map(int, preferred_qualities.split(',')) if preferred_qualities else [] + new, existing = series.get_backlogged_episodes(allowed_qualities=allowed_qualities, + preferred_qualities=preferred_qualities) + data = {'new': new, 'existing': existing} + return self._ok(data=data) + + return self._bad_request('Invalid request') diff --git a/medusa/server/api/v2/series_operation.py b/medusa/server/api/v2/series_operation.py new file mode 100644 index 0000000000..7ea640f07a --- /dev/null +++ b/medusa/server/api/v2/series_operation.py @@ -0,0 +1,46 @@ +# coding=utf-8 +"""Request handler for series operations.""" + +from medusa.server.api.v2.base import BaseRequestHandler +from medusa.server.api.v2.series import SeriesHandler +from medusa.tv.series import Series, SeriesIdentifier +from tornado.escape import json_decode + + +class SeriesOperationHandler(BaseRequestHandler): + """Operation request handler for series.""" + + #: parent resource handler + parent_handler = SeriesHandler + #: resource name + name = 'operation' + #: identifier + identifier = None + #: path param + path_param = None + #: allowed HTTP methods + allowed_methods = ('POST', ) + + def post(self, series_slug): + """Query series information. + + :param series_slug: series slug. E.g.: tvdb1234 + """ + series_identifier = SeriesIdentifier.from_slug(series_slug) + if not series_identifier: + return self._bad_request('Invalid series slug') + + series = Series.find_by_identifier(series_identifier) + if not series: + return self._not_found('Series not found') + + data = json_decode(self.request.body) + if not data or not all([data.get('type')]) or len(data) != 1: + return self._bad_request('Invalid request body') + + if data['type'] == 'ARCHIVE_EPISODES': + if series.set_all_episodes_archived(final_status_only=True): + return self._created() + return self._no_content() + + return self._bad_request('Invalid operation') diff --git a/medusa/server/api/v2/show.py b/medusa/server/api/v2/show.py deleted file mode 100644 index a2cd38b649..0000000000 --- a/medusa/server/api/v2/show.py +++ /dev/null @@ -1,207 +0,0 @@ -# coding=utf-8 -"""Request handler for shows.""" - -from tornado.escape import json_decode -from .base import BaseRequestHandler -from .... 
import app -from ....indexers.indexer_config import indexer_name_to_id -from ....show.show import Show -from ....show_queue import ShowQueueActions - - -class EpisodeIdentifier(object): - """Episode Identifier.""" - - def __init__(self, season, episode, absolute_episode, air_date): - """Default constructor.""" - self.season = season - self.episode = episode - self.absolute_episode = absolute_episode - self.air_date = air_date - - def __bool__(self): - """Boolean function.""" - return (self.season or self.episode or self.absolute_episode or self.air_date) is not None - - __nonzero__ = __bool__ - - -class ShowHandler(BaseRequestHandler): - """Shows request handler.""" - - def set_default_headers(self): - """Set default CORS headers.""" - super(ShowHandler, self).set_default_headers() - self.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS') - - def get(self, show_indexer, show_id, season, episode, absolute_episode, air_date, query): - """Query show information. - - :param show_indexer: - :param show_id: - :type show_id: str - :param season: - :param episode: - :param absolute_episode: - :param air_date: - :param query: - """ - # @TODO: This should be completely replaced with show_id - # indexer_cfg = indexerConfig.get(reverse_mappings.get('{0}_id'.format(show_indexer))) if show_indexer else None - # show_indexer = indexer_cfg['id'] if indexer_cfg else None - show_indexer = indexer_name_to_id(show_indexer) if show_indexer else None - indexerid = self._parse(show_id) - season = self._parse(season) - episode = self._parse(episode) - absolute_episode = self._parse(absolute_episode) - air_date = self._parse_date(air_date) - - # @TODO: https://github.com/SiCKRAGETV/SiCKRAGE/pull/2558 - - arg_paused = self._parse(self.get_argument('paused', default=None)) - if show_id is not None: - tv_show = Show.find(app.showList, indexerid, show_indexer) - if not self._match(tv_show, arg_paused): - return self.api_finish(status=404, error='Show not found') - - ep_id = EpisodeIdentifier(season, episode, absolute_episode, air_date) - if ep_id or query == 'episodes': - return self._handle_episode(tv_show, ep_id, query) - - return self._handle_detailed_show(tv_show, query) - - data = [s.to_json(detailed=self.get_argument('detailed', default=False)) for s in app.showList if self._match(s, arg_paused)] - return self._paginate(data, 'title') - - @staticmethod - def _match(tv_show, paused): - return tv_show and (paused is None or tv_show.paused == paused) - - def _handle_detailed_show(self, tv_show, query): - if query: - if query == 'backlogged': - # TODO: revisit - allowed_qualities = self._parse(self.get_argument('allowed', default=None), str) - allowed_qualities = map(int, allowed_qualities.split(',')) if allowed_qualities else [] - preferred_qualities = self._parse(self.get_argument('preferred', default=None), str) - preferred_qualities = map(int, preferred_qualities.split(',')) if preferred_qualities else [] - new, existing = tv_show.get_backlogged_episodes(allowed_qualities=allowed_qualities, - preferred_qualities=preferred_qualities) - data = {'new': new, 'existing': existing} - elif query == 'archiveEpisodes': - # TODO: GET should never change data and the return should always be an episode, - # not this funny dict - data = {'archived': 'true' if tv_show.set_all_episodes_archived(final_status_only=True) else 'false'} - elif query == 'queue': - # TODO: revisit - action, message = app.show_queue_scheduler.action.get_queue_action(tv_show) - data = { - 'action': 
ShowQueueActions.names[action], - 'message': message, - } if action is not None else dict() - else: - data = tv_show.to_json() - if query in data: - data = data[query] - else: - return self.api_finish(status=400, error="Invalid resource path '{0}'".format(query)) - else: - data = tv_show.to_json() - self.api_finish(data=data) - - def _handle_episode(self, tv_show, ep_id, query): - if (ep_id.episode or ep_id.absolute_episode or ep_id.air_date) is not None: - tv_episode = self._find_tv_episode(tv_show=tv_show, ep_id=ep_id) - if not tv_episode: - return self.api_finish(status=404, error='Episode not found') - return self._handle_detailed_episode(tv_episode, query) - - tv_episodes = tv_show.get_all_episodes(season=ep_id.season) - data = [e.to_json(detailed=False) for e in tv_episodes] - - return self._paginate(data, 'airDate') - - @staticmethod - def _find_tv_episode(tv_show, ep_id): - """Find Episode based on specified criteria. - - :param tv_show: - :param ep_id: - :return: - :rtype: medusa.tv.Episode or tuple(int, string) - """ - if ep_id.season is not None and ep_id.episode is not None: - tv_episode = tv_show.get_episode(season=ep_id.season, episode=ep_id.episode, should_cache=False) - elif ep_id.absolute_episode is not None: - tv_episode = tv_show.get_episode(absolute_number=ep_id.absolute_episode, should_cache=False) - elif ep_id.air_date: - tv_episode = tv_show.get_episode(air_date=ep_id.air_date, should_cache=False) - else: - # if this happens then it's a bug! - raise ValueError - - if tv_episode: - if not tv_episode.loaded: - tv_episode.load_from_db(tv_episode.season, tv_episode.episode) - return tv_episode - - def _handle_detailed_episode(self, tv_episode, query): - data = tv_episode.to_json() - if query: - if query == 'metadata': - data = tv_episode.metadata() if tv_episode.is_location_valid() else dict() - elif query in data: - data = data[query] - else: - return self.api_finish(status=400, error="Invalid resource path '{0}'".format(query)) - - return self.api_finish(data=data) - - def put(self, show_id): - """Replace whole show object. - - :param show_id: - :type show_id: str - """ - return self.api_finish() - - def patch(self, show_indexer, show_id, *args, **kwargs): - """Update show object.""" - # @TODO: This should be completely replaced with show_id - # indexer_cfg = indexerConfig.get(reverse_mappings.get('{0}_id'.format(show_indexer))) if show_indexer else None - # show_indexer = indexer_cfg['id'] if indexer_cfg else None - show_indexer = indexer_name_to_id(show_indexer) - indexerid = self._parse(show_id) - - if show_id is not None: - tv_show = Show.find(app.showList, indexerid, show_indexer) - print(tv_show) - - data = json_decode(self.request.body) - done_data = {} - done_errors = [] - for key in data.keys(): - if key == 'pause' and str(data['pause']).lower() in ['true', 'false']: - error, _ = Show.pause(indexerid, data['pause']) - if error is not None: - self.api_finish(error=error) - else: - done_data['pause'] = data['pause'] - if len(done_errors): - print('Can\'t PATCH [' + ', '.join(done_errors) + '] since ' + ["it's a static field.", "they're static fields."][len(done_errors) > 1]) - self.api_finish(data=done_data) - else: - return self.api_finish(status=404, error='Show not found') - - def post(self): - """Add a show.""" - return self.api_finish() - - def delete(self, show_id): - """Delete a show. 
- - :param show_id: - :type show_id: str - """ - error, show = Show.delete(indexer_id=show_id, remove_files=self.get_argument('remove_files', default=False)) - return self.api_finish(error=error, data=show) diff --git a/medusa/server/api/v2/status.py b/medusa/server/api/v2/status.py deleted file mode 100644 index 07b19b03e9..0000000000 --- a/medusa/server/api/v2/status.py +++ /dev/null @@ -1,16 +0,0 @@ -# coding=utf-8 -"""Request handler for server status.""" - -from .base import BaseRequestHandler - - -class StatusHandler(BaseRequestHandler): - """Status request handler.""" - - def get(self, query=''): - """Query server status. - - :param query: - :type query: str - """ - self.api_finish() diff --git a/medusa/server/core.py b/medusa/server/core.py index d77643b7f7..718c39fe20 100644 --- a/medusa/server/core.py +++ b/medusa/server/core.py @@ -5,6 +5,20 @@ import os import threading +from medusa.server.api.v2.alias import AliasHandler +from medusa.server.api.v2.alias_source import ( + AliasSourceHandler, + AliasSourceOperationHandler, +) +from medusa.server.api.v2.auth import AuthHandler +from medusa.server.api.v2.base import NotFoundHandler +from medusa.server.api.v2.config import ConfigHandler +from medusa.server.api.v2.episode import EpisodeHandler +from medusa.server.api.v2.log import LogHandler +from medusa.server.api.v2.series import SeriesHandler +from medusa.server.api.v2.series_asset import SeriesAssetHandler +from medusa.server.api.v2.series_legacy import SeriesLegacyHandler +from medusa.server.api.v2.series_operation import SeriesOperationHandler from tornado.httpserver import HTTPServer from tornado.ioloop import IOLoop from tornado.web import Application, RedirectHandler, StaticFileHandler, url @@ -17,42 +31,39 @@ def get_apiv2_handlers(base): """Return api v2 handlers.""" - from .api.v2.config import ConfigHandler - from .api.v2.log import LogHandler - from .api.v2.show import ShowHandler - from .api.v2.auth import AuthHandler - from .api.v2.asset import AssetHandler - from .api.v2.base import NotFoundHandler - from .api.v2.scene_exception import (SceneExceptionHandler, SceneExceptionTypeHandler, - SceneExceptionAllTypeOperationHandler, SceneExceptionTypeOperationHandler) - - show_id = r'(?P[a-z]+)(?P\d+)' - # This has to accept season of 1-4 as some seasons are years. For example Formula 1 - ep_id = r'(?:(?:s(?P\d{1,4})(?:e(?P\d{1,2}))?)|(?:e(?P\d{1,3}))' \ - r'|(?P\d{4}\-\d{2}\-\d{2}))' - query = r'(?P[\w]+)' - query_extended = r'(?P[\w \(\)%]+)' # This also accepts the space char, () and % - log_level = r'(?P[a-zA-Z]+)' - asset_group = r'(?P[a-zA-Z0-9]+)' - return [ - # All operations endpoints should be defined first. 
- (r'{base}/exceptiontype/(?P[a-z]+)/operation?/?'.format(base=base), SceneExceptionTypeOperationHandler), - (r'{base}/exceptiontype/operation?/?'.format(base=base), SceneExceptionAllTypeOperationHandler), - - # Regular REST routes - (r'{base}/show(?:/{show_id}(?:/{ep_id})?(?:/{query})?)?/?'.format(base=base, show_id=show_id, ep_id=ep_id, - query=query), ShowHandler), - (r'{base}/config(?:/{query})?/?'.format(base=base, query=query), ConfigHandler), - (r'{base}/log(?:/{log_level})?/?'.format(base=base, log_level=log_level), LogHandler), - (r'{base}/authenticate(/?)'.format(base=base), AuthHandler), - (r'{base}/asset(?:/{asset_group})(?:/{query})?/?'.format(base=base, asset_group=asset_group, - query=query_extended), AssetHandler), - (r'{base}/sceneexception(?:/(?P\d+)?)?/?'.format(base=base), SceneExceptionHandler), - (r'{base}/exceptiontype(?:/(?P[a-z]+)?)?/?'.format(base=base), SceneExceptionTypeHandler), + # Order: Most specific to most generic + # /api/v2/series/tvdb1234/episode + EpisodeHandler.create_app_handler(base), + + # /api/v2/series/tvdb1234/operation + SeriesOperationHandler.create_app_handler(base), + # /api/v2/series/tvdb1234/asset + SeriesAssetHandler.create_app_handler(base), + # /api/v2/series/tvdb1234/legacy + SeriesLegacyHandler.create_app_handler(base), # To be removed + # /api/v2/series/tvdb1234 + SeriesHandler.create_app_handler(base), + + # /api/v2/config + ConfigHandler.create_app_handler(base), + + # /api/v2/log + LogHandler.create_app_handler(base), + + # /api/v2/alias-source/xem/operation + AliasSourceOperationHandler.create_app_handler(base), + # /api/v2/alias-source + AliasSourceHandler.create_app_handler(base), + + # /api/v2/alias + AliasHandler.create_app_handler(base), + + # /api/v2/authenticate + AuthHandler.create_app_handler(base), # Always keep this last! - (r'{base}(/?.*)'.format(base=base), NotFoundHandler), + NotFoundHandler.create_app_handler(base) ] diff --git a/medusa/show/coming_episodes.py b/medusa/show/coming_episodes.py index 512fed454a..19b3b9e449 100644 --- a/medusa/show/coming_episodes.py +++ b/medusa/show/coming_episodes.py @@ -19,6 +19,7 @@ from datetime import date, timedelta from medusa.helpers.quality import get_quality_string +from medusa.tv.series import SeriesIdentifier from .. 
import app from ..common import IGNORED, Quality, UNAIRED, WANTED from ..db import DBConnection @@ -113,6 +114,7 @@ def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_P results = [dict(result) for result in results] for index, item in enumerate(results): + item['series_slug'] = str(SeriesIdentifier.from_id(int(item['indexer']), item['indexer_id'])) results[index]['localtime'] = sbdatetime.convert_to_setting( parse_date_time(item['airdate'], item['airs'], item['network'])) diff --git a/medusa/tv/__init__.py b/medusa/tv/__init__.py index 965c266831..7c35c4bc96 100644 --- a/medusa/tv/__init__.py +++ b/medusa/tv/__init__.py @@ -3,4 +3,5 @@ from medusa.tv.base import TV from medusa.tv.cache import Cache from medusa.tv.episode import Episode +from medusa.tv.indexer import Indexer from medusa.tv.series import Series diff --git a/medusa/tv/base.py b/medusa/tv/base.py index fe76e9b4dc..4e8f229dd2 100644 --- a/medusa/tv/base.py +++ b/medusa/tv/base.py @@ -1,18 +1,23 @@ # coding=utf-8 +"""TV base class.""" -"""TVShow and TVEpisode classes.""" - -import datetime -import shutil import threading from medusa.indexers.indexer_config import INDEXER_TVDBV2 -import shutil_custom -shutil.copyfile = shutil_custom.copyfile_custom +class Identifier(object): + """Base identifier class.""" + + def __nonzero__(self): + """Magic method.""" + raise NotImplementedError + + __bool__ = __nonzero__ -MILLIS_YEAR_1900 = datetime.datetime(year=1900, month=1, day=1).toordinal() + def __ne__(self, other): + """Magic method.""" + return not self == other class TV(object): diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index 12a3dda5f1..553a2ef0a3 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -19,16 +19,12 @@ from __future__ import unicode_literals -import datetime import logging import os.path import re -import shutil import time -from collections import ( - OrderedDict, -) +from datetime import date, datetime import knowit from medusa import ( @@ -53,6 +49,7 @@ WANTED, statusStrings, ) +from medusa.helper.collections import NonEmptyDict from medusa.helper.common import ( dateFormat, dateTimeFormat, @@ -87,20 +84,141 @@ get_scene_numbering, xem_refresh, ) -from medusa.tv.base import TV - -import shutil_custom +from medusa.tv.base import Identifier, TV try: import xml.etree.cElementTree as ETree except ImportError: import xml.etree.ElementTree as ETree -shutil.copyfile = shutil_custom.copyfile_custom +logger = logging.getLogger(__name__) -MILLIS_YEAR_1900 = datetime.datetime(year=1900, month=1, day=1).toordinal() -logger = logging.getLogger(__name__) +class EpisodeNumber(Identifier): + """Episode Number: season/episode, absolute or air by date.""" + + date_fmt = '%Y-%m-%d' + regex = re.compile(r'\b(?:(?P\d{4}-\d{2}-\d{2})|' + r'(?:s(?P\d{1,4}))(?:e(?P\d{1,2}))|' + r'(?:e(?P\d{1,3})))\b', re.IGNORECASE) + + @classmethod + def from_slug(cls, slug): + """Create episode number from slug. 
E.g.: s01e02.""" + match = cls.regex.match(slug) + if match: + try: + result = {k: int(v) if k != 'air_date' else datetime.strptime(v, cls.date_fmt) + for k, v in match.groupdict().items() if v is not None} + if result: + if 'air_date' in result: + return AirByDateNumber(**result) + if 'season' in result and 'episode' in result: + return RelativeNumber(**result) + if 'abs_episode' in result: + return AbsoluteNumber(**result) + except ValueError: + pass + + +class RelativeNumber(Identifier): + """Regular episode number: season and episode.""" + + def __init__(self, season, episode): + """Constructor. + + :param season: + :type season: int + :param episode: + :type episode: int + """ + self.season = season + self.episode = episode + + def __nonzero__(self): + """Magic method.""" + return self.season is not None and self.episode is not None + + def __repr__(self): + """Magic method.""" + return ''.format(self.season, self.episode) + + def __str__(self): + """Magic method.""" + return 's{0:02d}e{1:02d}'.format(self.season, self.episode) + + def __hash__(self): + """Magic method.""" + return hash((self.season, self.episode)) + + def __eq__(self, other): + """Magic method.""" + return isinstance(other, RelativeNumber) and ( + self.season == other.season and self.episode == other.episode) + + +class AbsoluteNumber(EpisodeNumber): + """Episode number class that handles absolute episode numbers.""" + + def __init__(self, abs_episode): + """Constructor. + + :param abs_episode: + :type abs_episode: int + """ + self.episode = abs_episode + + def __nonzero__(self): + """Magic method.""" + return self.episode is not None + + def __repr__(self): + """Magic method.""" + return ''.format(self.episode) + + def __str__(self): + """Magic method.""" + return 'e{0:02d}'.format(self.episode) + + def __hash__(self): + """Magic method.""" + return hash(self.episode) + + def __eq__(self, other): + """Magic method.""" + return isinstance(other, AbsoluteNumber) and self.episode == other.episode + + +class AirByDateNumber(EpisodeNumber): + """Episode number class that handles air-by-date episode numbers.""" + + def __init__(self, air_date): + """Constructor. + + :param air_date: + :type air_date: datetime + """ + self.air_date = air_date + + def __nonzero__(self): + """Magic method.""" + return self.air_date is not None + + def __repr__(self): + """Magic method.""" + return ''.format(self.air_date) + + def __str__(self): + """Magic method.""" + return self.air_date.strftime(self.date_fmt) + + def __hash__(self): + """Magic method.""" + return hash(self.air_date) + + def __eq__(self, other): + """Magic method.""" + return isinstance(other, AirByDateNumber) and self.air_date == other.air_date class Episode(TV): @@ -128,8 +246,8 @@ def __init__(self, show, season, episode, filepath=''): self.description = '' self.subtitles = list() self.subtitles_searchcount = 0 - self.subtitles_lastsearch = str(datetime.datetime.min) - self.airdate = datetime.date.fromordinal(1) + self.subtitles_lastsearch = str(datetime.min) + self.airdate = date.fromordinal(1) self.hasnfo = False self.hastbn = False self.status = UNKNOWN @@ -150,6 +268,36 @@ def __init__(self, show, season, episode, filepath=''): self._specify_episode(self.season, self.episode) self.check_for_meta_files() + @classmethod + def find_by_series_and_episode(cls, series, episode_number): + """Find Episode based on series and episode number. 
+ + :param series: + :type series: medusa.tv.series.Series + :param episode_number: + :type episode_number: EpisodeNumber + :return: + :rtype: medusa.tv.Episode + """ + if isinstance(episode_number, RelativeNumber): + episode = series.get_episode(season=episode_number.season, episode=episode_number.episode, + should_cache=False, no_create=True) + elif isinstance(episode_number, AbsoluteNumber): + episode = series.get_episode(absolute_number=episode_number.episode, + should_cache=False, no_create=True) + + elif isinstance(episode_number, AirByDateNumber): + episode = series.get_episode(air_date=episode_number.air_date, + should_cache=False, no_create=True) + else: + # if this happens then it's a bug! + raise ValueError + + if episode: + if not episode.loaded: + episode.load_from_db(episode.season, episode.episode) + return episode + @staticmethod def from_filepath(filepath): """Return an Episode for the given filepath. @@ -213,6 +361,27 @@ def location(self, value): self._location = value self.file_size = os.path.getsize(value) if value and self.is_location_valid(value) else 0 + @property + def indexer_name(self): + """Return the indexer name identifier. Example: tvdb.""" + return indexerConfig[self.indexer].get('identifier') + + @property + def air_date(self): + """Return air date from the episode.""" + return sbdatetime.convert_to_setting( + network_timezones.parse_date_time( + date.toordinal(self.airdate), + self.show.airs, + self.show.network + ) + ).isoformat(b'T') + + @property + def status_name(self): + """Return the status name.""" + return statusStrings[Quality.split_composite_status(self.status).status] + def is_location_valid(self, location=None): """Whether the location is a valid file. @@ -260,7 +429,7 @@ def download_subtitles(self, lang=None): self.subtitles = subtitles.merge_subtitles(self.subtitles, new_subtitles) self.subtitles_searchcount += 1 if self.subtitles_searchcount else 1 - self.subtitles_lastsearch = datetime.datetime.now().strftime(dateTimeFormat) + self.subtitles_lastsearch = datetime.now().strftime(dateTimeFormat) logger.debug('{id}: Saving last subtitles search to database', id=self.show.indexerid) self.save_to_db() @@ -383,7 +552,7 @@ def load_from_db(self, season, episode): self.subtitles = sql_results[0][b'subtitles'].split(',') self.subtitles_searchcount = sql_results[0][b'subtitles_searchcount'] self.subtitles_lastsearch = sql_results[0][b'subtitles_lastsearch'] - self.airdate = datetime.date.fromordinal(int(sql_results[0][b'airdate'])) + self.airdate = date.fromordinal(int(sql_results[0][b'airdate'])) self.status = int(sql_results[0][b'status'] or -1) # don't overwrite my location @@ -517,11 +686,11 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season firstaired = getattr(my_ep, 'firstaired', None) if not firstaired or firstaired == '0000-00-00': - firstaired = str(datetime.date.fromordinal(1)) + firstaired = str(date.fromordinal(1)) raw_airdate = [int(x) for x in firstaired.split('-')] try: - self.airdate = datetime.date(raw_airdate[0], raw_airdate[1], raw_airdate[2]) + self.airdate = date(raw_airdate[0], raw_airdate[1], raw_airdate[2]) except (ValueError, IndexError): logger.warning('{id}: Malformed air date of {aired} retrieved from {indexer} for {show} {ep}', id=self.show.indexerid, aired=firstaired, indexer=indexerApi(self.indexer).name, @@ -554,7 +723,7 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season status=statusStrings[self.status].upper(), location=self.location) if not 
os.path.isfile(self.location): - if (self.airdate >= datetime.date.today() or self.airdate == datetime.date.fromordinal(1)) and \ + if (self.airdate >= date.today() or self.airdate == date.fromordinal(1)) and \ self.status in (UNAIRED, UNKNOWN, WANTED): # Need to check if is UNAIRED otherwise code will step into second 'IF' # and make episode as default_ep_status @@ -675,9 +844,9 @@ def __load_from_nfo(self, location): if ep_details.findtext('aired'): raw_airdate = [int(x) for x in ep_details.findtext('aired').split('-')] - self.airdate = datetime.date(raw_airdate[0], raw_airdate[1], raw_airdate[2]) + self.airdate = date(raw_airdate[0], raw_airdate[1], raw_airdate[2]) else: - self.airdate = datetime.date.fromordinal(1) + self.airdate = date.fromordinal(1) self.hasnfo = True else: @@ -706,48 +875,52 @@ def __str__(self): def to_json(self, detailed=True): """Return the json representation.""" - indexer_name = indexerConfig[self.indexer]['identifier'] - parsed_airdate = sbdatetime.convert_to_setting( - network_timezones.parse_date_time( - datetime.datetime.toordinal(self.airdate), - self.show.airs, - self.show.network - ) - ).isoformat(b'T') - data = OrderedDict([ - ('identifier', self.identifier), - ('id', OrderedDict([ - (indexer_name, self.indexerid), - ])), - ('season', self.season), - ('episode', self.episode), - ('absoluteNumber', self.absolute_number), - ('airDate', parsed_airdate), - ('title', self.name), - ('description', self.description), - ('hasNfo', self.hasnfo), - ('hasTbn', self.hastbn), - ('subtitles', self.subtitles), - ('status', statusStrings[Quality.split_composite_status(self.status).status]), - ('releaseName', self.release_name), - ('isProper', self.is_proper), - ('version', self.version), - ('scene', OrderedDict([ - ('season', self.scene_season), - ('episode', self.scene_episode), - ('absoluteNumber', self.scene_absolute_number), - ])), - ('location', self.location), - ('fileSize', self.file_size), - ]) + data = NonEmptyDict() + data['identifier'] = self.identifier + data['id'] = {self.indexer_name: self.indexerid} + data['season'] = self.season + data['episode'] = self.episode + + if self.absolute_number: + data['absoluteNumber'] = self.absolute_number + + data['airDate'] = self.air_date + data['title'] = self.name + data['description'] = self.description + data['content'] = [] + data['title'] = self.name + data['subtitles'] = self.subtitles + data['status'] = self.status_name + data['release'] = NonEmptyDict() + data['release']['name'] = self.release_name + data['release']['group'] = self.release_group + data['release']['proper'] = self.is_proper + data['release']['version'] = self.version + data['scene'] = NonEmptyDict() + data['scene']['season'] = self.scene_season + data['scene']['episode'] = self.scene_episode + + if self.scene_absolute_number: + data['scene']['absoluteNumber'] = self.scene_absolute_number + + data['file'] = NonEmptyDict() + data['file']['location'] = self.location + if self.file_size: + data['file']['size'] = self.file_size + + if self.hasnfo: + data['content'].append('NFO') + if self.hastbn: + data['content'].append('thumbnail') + if detailed: - data.update(OrderedDict([ - ('releaseGroup', self.release_group), - ('subtitlesSearchCount', self.subtitles_searchcount), - ('subtitlesLastSearched', self.subtitles_lastsearch), - ('wantedQualities', self.wanted_quality), - ('relatedEpisodes', [ep.identifier() for ep in self.related_episodes]), - ])) + data['statistics'] = NonEmptyDict() + data['statistics']['subtitleSearch'] = NonEmptyDict() + 
data['statistics']['subtitleSearch']['last'] = self.subtitles_lastsearch + data['statistics']['subtitleSearch']['count'] = self.subtitles_searchcount + data['wantedQualities'] = self.wanted_quality + data['wantedQualities'] = [ep.identifier() for ep in self.related_episodes] + return data def create_meta_files(self): @@ -1150,9 +1323,9 @@ def release_group(show, name): '%Y': str(self.airdate.year), '%M': str(self.airdate.month), '%D': str(self.airdate.day), - '%CY': str(datetime.date.today().year), - '%CM': str(datetime.date.today().month), - '%CD': str(datetime.date.today().day), + '%CY': str(date.today().year), + '%CM': str(date.today().month), + '%CD': str(date.today().day), '%0M': '%02d' % self.airdate.month, '%0D': '%02d' % self.airdate.day, '%RT': 'PROPER' if self.is_proper else '', @@ -1533,7 +1706,7 @@ def airdate_modify_stamp(self): if app.FILE_TIMESTAMP_TIMEZONE == 'local': airdatetime = airdatetime.astimezone(network_timezones.app_timezone) - filemtime = datetime.datetime.fromtimestamp( + filemtime = datetime.fromtimestamp( os.path.getmtime(self.location)).replace(tzinfo=network_timezones.app_timezone) if filemtime != airdatetime: diff --git a/medusa/tv/indexer.py b/medusa/tv/indexer.py new file mode 100644 index 0000000000..d55c5657bf --- /dev/null +++ b/medusa/tv/indexer.py @@ -0,0 +1,54 @@ +# coding=utf-8 +"""Indexer class.""" + +from medusa.indexers.indexer_config import indexer_id_to_name, indexer_name_to_id +from medusa.tv.base import Identifier + + +class Indexer(Identifier): + """Represent an Indexer with id and slug name.""" + + def __init__(self, identifier): + """Constructor. + + :param identifier: + :type identifier: int + """ + self.id = identifier + + @classmethod + def from_slug(cls, slug): + """Create Indexer from slug.""" + identifier = indexer_name_to_id(slug) + if identifier is not None: + return Indexer(identifier) + + @classmethod + def from_id(cls, pk): + """Create Indexer from id.""" + return Indexer(pk) + + @property + def slug(self): + """Slug name.""" + return indexer_id_to_name(self.id) + + def __nonzero__(self): + """Magic method bool.""" + return self.id is not None + + def __repr__(self): + """Magic method.""" + return ''.format(self.slug, self.id) + + def __str__(self): + """Magic method.""" + return str(self.slug) + + def __hash__(self): + """Magic method.""" + return hash(self.id) + + def __eq__(self, other): + """Magic method.""" + return isinstance(other, Indexer) and self.id == other.id diff --git a/medusa/tv/series.py b/medusa/tv/series.py index b4bc67499c..e9e6492352 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -29,7 +29,6 @@ import traceback import warnings from collections import ( - OrderedDict, namedtuple, ) from itertools import groupby @@ -65,12 +64,14 @@ qualityPresets, statusStrings, ) +from medusa.helper.collections import NonEmptyDict from medusa.helper.common import ( episode_num, pretty_file_size, try_int, ) from medusa.helper.exceptions import ( + CantRemoveShowException, EpisodeDeletedException, EpisodeNotFoundException, MultipleShowObjectsException, @@ -80,19 +81,24 @@ ex, ) from medusa.helpers.externals import get_externals +from medusa.image_cache import ImageCache from medusa.indexers.indexer_api import indexerApi from medusa.indexers.indexer_config import ( INDEXER_TVRAGE, indexerConfig, indexer_id_to_slug, mappings, - reverse_mappings -) + reverse_mappings, + slug_to_indexer_id) from medusa.indexers.indexer_exceptions import ( IndexerAttributeNotFound, IndexerException, IndexerSeasonNotFound, ) +from 
medusa.media.banner import ShowBanner +from medusa.media.fan_art import ShowFanArt +from medusa.media.network_logo import ShowNetworkLogo +from medusa.media.poster import ShowPoster from medusa.name_parser.parser import ( InvalidNameException, InvalidShowException, @@ -101,10 +107,9 @@ from medusa.sbdatetime import sbdatetime from medusa.scene_exceptions import get_scene_exceptions from medusa.show.show import Show -from medusa.tv.base import TV +from medusa.tv.base import Identifier, TV from medusa.tv.episode import Episode - -import shutil_custom +from medusa.tv.indexer import Indexer from six import text_type @@ -114,13 +119,71 @@ app.TRASH_REMOVE_SHOW = 0 -shutil.copyfile = shutil_custom.copyfile_custom - MILLIS_YEAR_1900 = datetime.datetime(year=1900, month=1, day=1).toordinal() logger = logging.getLogger(__name__) +class SeriesIdentifier(Identifier): + """Series identifier with indexer and indexer id.""" + + def __init__(self, indexer, identifier): + """Constructor. + + :param indexer: + :type indexer: Indexer or int + :param identifier: + :type identifier: int + """ + self.indexer = indexer if isinstance(indexer, Indexer) else Indexer.from_id(indexer) + self.id = identifier + + @classmethod + def from_slug(cls, slug): + """Create SeriesIdentifier from slug. E.g.: tvdb1234.""" + result = slug_to_indexer_id(slug) + if result is not None: + indexer, indexer_id = result + if indexer is not None and indexer_id is not None: + return SeriesIdentifier(Indexer(indexer), indexer_id) + + @classmethod + def from_id(cls, indexer, indexer_id): + """Create SeriesIdentifier from tuple (indexer, indexer_id).""" + return SeriesIdentifier(indexer, indexer_id) + + @property + def slug(self): + """Slug.""" + return str(self) + + @property + def api(self): + """Api.""" + indexer_api = indexerApi(self.indexer.id) + return indexer_api.indexer(**indexer_api.api_params) + + def __nonzero__(self): + """Magic method.""" + return self.indexer is not None and self.id is not None + + def __repr__(self): + """Magic method.""" + return ''.format(self.indexer, self.id) + + def __str__(self): + """Magic method.""" + return '{0}{1}'.format(self.indexer, self.id) + + def __hash__(self): + """Magic method.""" + return hash((self.indexer, self.id)) + + def __eq__(self, other): + """Magic method.""" + return isinstance(other, SeriesIdentifier) and self.indexer == other.indexer and self.id == other.id + + class Series(TV): """Represent a TV Show.""" @@ -147,7 +210,7 @@ def __init__(self, indexer, indexerid, lang='', quality=None, self.quality = quality or int(app.QUALITY_DEFAULT) self.flatten_folders = flatten_folders or int(app.FLATTEN_FOLDERS_DEFAULT) self.status = 'Unknown' - self.airs = '' + self._airs = '' self.start_year = 0 self.paused = 0 self.air_by_date = 0 @@ -176,6 +239,56 @@ def __init__(self, indexer, indexerid, lang='', quality=None, self._load_from_db() + @classmethod + def find_series(cls, predicate=None): + """Find series based on given predicate.""" + return [s for s in app.showList if s and (not predicate or predicate(s))] + + @classmethod + def find_by_identifier(cls, identifier, predicate=None): + """Find series by its identifier and predicate. 
+ + :param identifier: + :type identifier: medusa.tv.series.SeriesIdentifier + :param predicate: + :type predicate: callable + :return: + :rtype: + """ + result = Show.find(app.showList, identifier.id, identifier.indexer.id) + if result and (not predicate or predicate(result)): + return result + + @classmethod + def from_identifier(cls, identifier): + """Create a series object from its identifier.""" + return Series(identifier.indexer.id, identifier.id) + + # TODO: Make this the single entry to add new series + @classmethod + def save_series(cls, series): + """Save the specified series to medusa.""" + try: + api = series.identifier.api + series.load_from_indexer(tvapi=api) + series.load_imdb_info() + app.showList.append(series) + series.save_to_db() + series.load_episodes_from_indexer(tvapi=api) + return series + except IndexerException as e: + logger.warning('Unable to load series from indexer: {0!r}'.format(e)) + + @property + def identifier(self): + """Identifier.""" + return SeriesIdentifier(self.indexer, self.indexerid) + + @property + def slug(self): + """Slug.""" + return self.identifier.slug + @property def indexer_api(self): """Get an Indexer API instance.""" @@ -268,7 +381,7 @@ def indexer_name(self): @property def indexer_slug(self): - """Return the slug name of the show. Example: tvdb1234.""" + """Return the slug name of the series. Example: tvdb1234.""" return indexer_id_to_slug(self.indexer, self.indexerid) @location.setter @@ -327,6 +440,114 @@ def subtitle_flag(self): """Subtitle flag.""" return subtitles.code_from_code(self.lang) if self.lang else '' + @property + def show_type(self): + """Return show type.""" + return 'sports' if self.is_sports else ('anime' if self.is_anime else 'series') + + @property + def imdb_year(self): + """Return series year.""" + return self.imdb_info.get('year') + + @property + def imdb_runtime(self): + """Return series runtime.""" + return self.imdb_info.get('runtimes') + + @property + def imdb_akas(self): + """Return genres akas dict.""" + akas = {} + for x in [v for v in self.imdb_info.get('akas', '').split('|') if v]: + if '::' in x: + val, key = x.split('::') + akas[key] = val + return akas + + @property + def imdb_countries(self): + """Return country codes.""" + return [v for v in self.imdb_info.get('country_codes', '').split('|') if v] + + @property + def imdb_plot(self): + """Return series plot.""" + return self.imdb_info.get('plot', '') + + @property + def imdb_genres(self): + """Return series genres.""" + return self.imdb_info.get('genres', '') + + @property + def imdb_votes(self): + """Return series votes.""" + return self.imdb_info.get('votes') + + @property + def imdb_rating(self): + """Return series rating.""" + return self.imdb_info.get('rating') + + @property + def imdb_certificates(self): + """Return series certificates.""" + return self.imdb_info.get('certificates') + + @property + def next_airdate(self): + """Return next airdate.""" + return ( + sbdatetime.convert_to_setting(network_timezones.parse_date_time(self.next_aired, self.airs, self.network)) + if try_int(self.next_aired, 1) > MILLIS_YEAR_1900 else None + ) + + @property + def genres(self): + """Return genres list.""" + return list({i for i in (self.genre or '').split('|') if i} | + {i for i in self.imdb_genres.replace('Sci-Fi', 'Science-Fiction').split('|') if i}) + + @property + def airs(self): + """Return episode time that series usually airs.""" + return self._airs + + @airs.setter + def airs(self, value): + """Set episode time that series usually airs.""" + 
self._airs = text_type(value).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ').strip() + + @property + def poster(self): + """Return poster path.""" + poster = ImageCache.poster_path(self.indexerid) + if os.path.isfile(poster): + return poster + + @property + def banner(self): + """Return banner path.""" + banner = ImageCache.banner_path(self.indexerid) + if os.path.isfile(banner): + return banner + + @property + def aliases(self): + """Return series aliases.""" + return self.exceptions or get_scene_exceptions(self.indexerid, self.indexer) + + @property + def release_ignore_words(self): + """Return release ignore words.""" + return [v for v in (self.rls_ignore_words or '').split(',') if v] + + @property + def release_required_words(self): + """Return release ignore words.""" + return [v for v in (self.rls_require_words or '').split(',') if v] + def flush_episodes(self): """Delete references to anything that's not in the internal lists.""" for cur_season in self.episodes: @@ -1048,9 +1269,8 @@ def make_ep_from_file(self, filepath): if not cur_ep: raise EpisodeNotFoundException except EpisodeNotFoundException: - logger.warning(u"{indexerid}: Episode not found for this show: '{show}'. " - u'Please run a full update. Skipping file: {filepath}', - indexerid=self.indexerid, show=self.name, filepath=filepath) + logger.warning(u'{indexerid}: Unable to figure out what this file is, skipping {filepath}', + indexerid=self.indexerid, filepath=filepath) continue else: @@ -1246,7 +1466,7 @@ def load_from_indexer(self, tvapi=None): self.status = getattr(indexed_show, 'status', 'Unknown') - self.plot = getattr(indexed_show, 'overview', '') or self.get_plot() + self.plot = getattr(indexed_show, 'overview', '') or self.imdb_plot self._save_externals_to_db() @@ -1273,7 +1493,7 @@ def load_imdb_info(self): # If the show has no year, IMDb returned something we don't want if not imdb_obj.year: logger.debug(u'{id}: IMDb returned invalid info for {imdb_id}, skipping update.', - id=self.indexerid, imdb_id=self.imdbid) + id=self.indexerid, imdb_id=self.imdb_id) return self.imdb_info = { @@ -1642,109 +1862,68 @@ def __unicode__(self): def to_json(self, detailed=True): """Return JSON representation.""" - indexer_name = self.indexer_slug bw_list = self.release_groups or BlackAndWhiteList(self.indexerid) - result = OrderedDict([ - ('id', OrderedDict([ - (indexer_name, self.indexerid), - ('imdb', str(self.imdb_id)) - ])), - ('title', self.name), - ('indexer', indexer_name), # e.g. tvdb - ('network', self.network), # e.g. CBS - ('type', self.classification), # e.g. Scripted - ('status', self.status), # e.g. 
Continuing - ('airs', text_type(self.airs).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ').strip()), - # e.g Thursday 8:00 PM - ('language', self.lang), - ('showType', 'sports' if self.is_sports else ('anime' if self.is_anime else 'series')), - ('akas', self.get_akas()), - ('year', OrderedDict([ - ('start', self.imdb_info.get('year') or self.start_year), - ])), - ('nextAirDate', self.get_next_airdate()), - ('runtime', self.imdb_info.get('runtimes') or self.runtime), - ('genres', self.get_genres()), - ('rating', OrderedDict([])), - ('classification', self.imdb_info.get('certificates')), - ('cache', OrderedDict([])), - ('countries', self.get_countries()), - ('plot', self.get_plot()), - ('config', OrderedDict([ - ('location', self.raw_location), - ('qualities', OrderedDict([ - ('allowed', self.get_allowed_qualities()), - ('preferred', self.get_preferred_qualities()), - ])), - ('paused', bool(self.paused)), - ('airByDate', bool(self.air_by_date)), - ('subtitlesEnabled', bool(self.subtitles)), - ('dvdOrder', bool(self.dvd_order)), - ('flattenFolders', bool(self.flatten_folders)), - ('scene', self.is_scene), - ('defaultEpisodeStatus', statusStrings[self.default_ep_status]), - ('aliases', self.exceptions or get_scene_exceptions(self.indexerid, self.indexer)), - ('release', OrderedDict([ - ('blacklist', bw_list.blacklist), - ('whitelist', bw_list.whitelist), - ('ignoredWords', [v for v in (self.rls_ignore_words or '').split(',') if v]), - ('requiredWords', [v for v in (self.rls_require_words or '').split(',') if v]), - ])), - ])) - ]) - - cache = image_cache.ImageCache() - if 'rating' in self.imdb_info and 'votes' in self.imdb_info: - result['rating']['imdb'] = OrderedDict([ - ('stars', self.imdb_info.get('rating')), - ('votes', self.imdb_info.get('votes')), - ]) - if os.path.isfile(cache.poster_path(self.indexerid)): - result['cache']['poster'] = cache.poster_path(self.indexerid) - if os.path.isfile(cache.banner_path(self.indexerid)): - result['cache']['banner'] = cache.banner_path(self.indexerid) + + data = NonEmptyDict() + data['id'] = NonEmptyDict() + data['id'][self.indexer_name] = self.indexerid + data['id']['imdb'] = text_type(self.imdb_id) + data['title'] = self.name + data['indexer'] = self.indexer_name # e.g. tvdb + data['network'] = self.network # e.g. CBS + data['type'] = self.classification # e.g. Scripted + data['status'] = self.status # e.g. Continuing + data['airs'] = self.airs # e.g. Thursday 8:00 PM + data['language'] = self.lang + data['showType'] = self.show_type # e.g. 
anime, sport, series + data['akas'] = self.imdb_akas + data['year'] = NonEmptyDict() + data['year']['start'] = self.imdb_year or self.start_year + data['nextAirDate'] = self.next_airdate + data['runtime'] = self.imdb_runtime or self.runtime + data['genres'] = self.genres + data['rating'] = NonEmptyDict() + if self.imdb_rating and self.imdb_votes: + data['rating']['imdb'] = NonEmptyDict() + data['rating']['imdb']['rating'] = self.imdb_rating + data['rating']['imdb']['votes'] = self.imdb_votes + + data['classification'] = self.imdb_certificates + data['cache'] = NonEmptyDict() + data['cache']['poster'] = self.poster + data['cache']['banner'] = self.banner + data['countries'] = self.imdb_countries + data['plot'] = self.imdb_plot or self.plot + data['config'] = NonEmptyDict() + data['config']['location'] = self.raw_location + data['config']['qualities'] = NonEmptyDict() + data['config']['qualities']['allowed'] = self.get_allowed_qualities() + data['config']['qualities']['preferred'] = self.get_preferred_qualities() + data['config']['paused'] = bool(self.paused) + data['config']['airByDate'] = bool(self.air_by_date) + data['config']['subtitlesEnabled'] = bool(self.subtitles) + data['config']['dvdOrder'] = bool(self.dvd_order) + data['config']['flattenFolders'] = bool(self.flatten_folders) + data['config']['scene'] = self.is_scene + data['config']['paused'] = bool(self.paused) + data['config']['defaultEpisodeStatus'] = self.default_ep_status_name + data['config']['aliases'] = self.aliases + data['config']['release'] = NonEmptyDict() + data['config']['release']['blacklist'] = bw_list.blacklist + data['config']['release']['whitelist'] = bw_list.whitelist + data['config']['release']['ignoredWords'] = self.release_ignore_words + data['config']['release']['requiredWords'] = self.release_required_words if detailed: - result.update(OrderedDict([ - ('seasons', OrderedDict([])) - ])) episodes = self.get_all_episodes() - result['seasons'] = [list(v) for _, v in groupby([ep.to_json() for ep in episodes], lambda item: item['season'])] - result['episodeCount'] = len(episodes) + data['seasons'] = [list(v) for _, v in + groupby([ep.to_json() for ep in episodes], lambda item: item['season'])] + data['episodeCount'] = len(episodes) last_episode = episodes[-1] if episodes else None if self.status == 'Ended' and last_episode and last_episode.airdate: - result['year']['end'] = last_episode.airdate.year - - return result - - def get_next_airdate(self): - """Return next airdate.""" - return ( - sbdatetime.convert_to_setting(network_timezones.parse_date_time(self.next_aired, self.airs, self.network)) - if try_int(self.next_aired, 1) > MILLIS_YEAR_1900 else None - ) - - def get_genres(self): - """Return genres list.""" - return list({v for v in (self.genre or '').split('|') if v} | - {v for v in self.imdb_info.get('genres', '').replace('Sci-Fi', 'Science-Fiction').split('|') if v}) - - def get_akas(self): - """Return genres akas dict.""" - akas = {} - for x in [v for v in self.imdb_info.get('akas', '').split('|') if v]: - if '::' in x: - val, key = x.split('::') - akas[key] = val - return akas + data['year']['end'] = last_episode.airdate.year - def get_countries(self): - """Return country codes.""" - return [v for v in self.imdb_info.get('country_codes', '').split('|') if v] - - def get_plot(self): - """Return show plot.""" - return self.imdb_info.get('plot', '') + return data def get_allowed_qualities(self): """Return allowed qualities.""" @@ -1966,3 +2145,35 @@ def set_all_episodes_archived(self, 
final_status_only=False): else: logger.debug(u'No DOWNLOADED episodes for show ID: {show}', show=self.name) return False + + def pause(self): + """Pause the series.""" + self.paused = True + self.save_to_db() + + def unpause(self): + """Unpause the series.""" + self.paused = False + self.save_to_db() + + def delete(self, remove_files): + """Delete the series.""" + try: + app.show_queue_scheduler.action.removeShow(self, bool(remove_files)) + return True + except CantRemoveShowException: + pass + + def get_asset(self, asset_type): + """Get the specified asset for this series.""" + asset_type = asset_type.lower() + media_format = ('normal', 'thumb')[asset_type in ('bannerthumb', 'posterthumb', 'small')] + + if asset_type.startswith('banner'): + return ShowBanner(self.indexerid, media_format) + elif asset_type.startswith('fanart'): + return ShowFanArt(self.indexerid, media_format) + elif asset_type.startswith('poster'): + return ShowPoster(self.indexerid, media_format) + elif asset_type.startswith('network'): + return ShowNetworkLogo(self.indexerid, media_format) diff --git a/package.json b/package.json index 72318f3d39..45eca0f816 100644 --- a/package.json +++ b/package.json @@ -12,10 +12,12 @@ "scripts": { "test-js": "xo", "test-css": "node_modules/stylelint/bin/stylelint.js 'static/css/*.css'", + "test-api": "node_modules/.bin/dredd --config dredd/dredd.yml", "security": "snyk test", "build": "node_modules/grunt-cli/bin/grunt" }, "devDependencies": { + "dredd": "^3.3.0", "grunt": "^0.4.5", "grunt-bower-concat": "^1.0.0", "grunt-bower-task": "^0.4.0", @@ -74,8 +76,8 @@ ] }, "ignoreFiles": [ - "static/css/lib/**", - "static/css/*.min.css" + "static/css/lib/**", + "static/css/*.min.css" ] }, "dependencies": { diff --git a/static/js/common/init.js b/static/js/common/init.js index b01105442c..fddfb261c8 100644 --- a/static/js/common/init.js +++ b/static/js/common/init.js @@ -6,8 +6,7 @@ MEDUSA.common.init = function() { if (MEDUSA.config.fanartBackground) { var showID = $('#showID').attr('value'); if (showID) { - let asset = 'show/' + $('#showID').attr('value') + '?type=fanart'; - let path = apiRoot + 'asset/' + asset + '&api_key=' + apiKey; + let path = apiRoot + 'series/' + $('#series_slug').attr('value') + '/asset/fanart?api_key=' + apiKey; $.backstretch(path); $('.backstretch').css('top', backstretchOffset()); $('.backstretch').css('opacity', MEDUSA.config.fanartBackgroundOpacity).fadeIn(500); diff --git a/static/js/config/index.js b/static/js/config/index.js index afcc5e68b3..def6737036 100644 --- a/static/js/config/index.js +++ b/static/js/config/index.js @@ -5,7 +5,7 @@ MEDUSA.config.index = function() { } $('#theme_name').on('change', function() { - api.patch('config', { + api.patch('config/main', { theme: { name: $(this).val() } diff --git a/static/js/core.js b/static/js/core.js index ca4a0d89de..30d67c8a13 100644 --- a/static/js/core.js +++ b/static/js/core.js @@ -35,7 +35,8 @@ var UTIL = { var body = document.body; $('[asset]').each(function() { let asset = $(this).attr('asset'); - let path = apiRoot + 'asset/' + asset + '&api_key=' + apiKey; + let series = $(this).attr('series'); + let path = apiRoot + 'series/' + series + '/asset/' + asset + '?api_key=' + apiKey; if (this.tagName.toLowerCase() === 'img') { if ($(this).attr('lazy') === 'on') { $(this).attr('data-original', path); @@ -82,7 +83,7 @@ $.fn.extend({ }); if (!document.location.pathname.endsWith('/login/')) { - api.get('config').then(function(response) { + api.get('config/main').then(function(response) { 
log.setDefaultLevel('trace'); $.extend(MEDUSA.config, response.data); MEDUSA.config.themeSpinner = MEDUSA.config.themeName === 'dark' ? '-dark' : ''; diff --git a/static/js/history/index.js b/static/js/history/index.js index c505ccd687..28cda51d36 100644 --- a/static/js/history/index.js +++ b/static/js/history/index.js @@ -48,7 +48,7 @@ MEDUSA.history.index = function() { }); $('.show-option select[name="layout"]').on('change', function() { - api.patch('config', { + api.patch('config/main', { layout: { history: $(this).val() } diff --git a/static/js/home/display-show.js b/static/js/home/display-show.js index 2dcf267043..b0abf10fb7 100644 --- a/static/js/home/display-show.js +++ b/static/js/home/display-show.js @@ -415,7 +415,7 @@ MEDUSA.home.displayShow = function() { // eslint-disable-line max-lines // href="home/toggleDisplayShowSpecials/?show=${show.indexerid}" $('.display-specials a').on('click', function() { - api.patch('config', { + api.patch('config/main', { layout: { show: { specials: $(this).text() !== 'Hide' diff --git a/static/js/home/edit-show.js b/static/js/home/edit-show.js index 63b34078f6..0897a64092 100644 --- a/static/js/home/edit-show.js +++ b/static/js/home/edit-show.js @@ -1,7 +1,6 @@ MEDUSA.home.editShow = function() { if (MEDUSA.config.fanartBackground) { - let asset = 'show/' + $('#showID').attr('value') + '?type=fanart'; - let path = apiRoot + 'asset/' + asset + '&api_key=' + apiKey; + let path = apiRoot + 'series/' + $('#showID').attr('value') + '/asset/fanart?api_key=' + apiKey; $.backstretch(path); $('.backstretch').css('opacity', MEDUSA.config.fanartBackgroundOpacity).fadeIn(500); } diff --git a/static/js/home/index.js b/static/js/home/index.js index 69df99a591..ee9c502f05 100644 --- a/static/js/home/index.js +++ b/static/js/home/index.js @@ -323,7 +323,7 @@ MEDUSA.home.index = function() { }); $('.show-option .show-layout').on('change', function() { - api.patch('config', { + api.patch('config/main', { layout: { home: $(this).val() } @@ -336,7 +336,7 @@ MEDUSA.home.index = function() { }); $('#showRootDir').on('change', function() { - api.patch('config', { + api.patch('config/main', { selectedRootIndex: $(this).val() }).then(function(response) { log.info(response); diff --git a/static/js/manage/backlog-overview.js b/static/js/manage/backlog-overview.js index e879fe0d74..08b9fffc53 100644 --- a/static/js/manage/backlog-overview.js +++ b/static/js/manage/backlog-overview.js @@ -60,7 +60,7 @@ MEDUSA.manage.backlogOverview = function() { }); $('#backlog_period').on('change', function() { - api.patch('config', { + api.patch('config/main', { backlogOverview: { period: $(this).val() } @@ -73,7 +73,7 @@ MEDUSA.manage.backlogOverview = function() { }); $('#backlog_status').on('change', function() { - api.patch('config', { + api.patch('config/main', { backlogOverview: { status: $(this).val() } diff --git a/static/js/manage/manage-searches.js b/static/js/manage/manage-searches.js index 68273877a4..21a27af096 100644 --- a/static/js/manage/manage-searches.js +++ b/static/js/manage/manage-searches.js @@ -7,19 +7,19 @@ MEDUSA.manage.manageSearches = function() { var status = $('#sceneExceptionStatus'); var medusaException = exceptions.data.filter(function(obj) { - return obj.id === 'medusa'; + return obj.id === 'local'; }); - var cusExceptionDate = new Date(medusaException[0].lastUpdate * 1000).toLocaleDateString(); + var cusExceptionDate = new Date(medusaException[0].lastRefresh * 1000).toLocaleDateString(); var xemException = exceptions.data.filter(function(obj) { return 
obj.id === 'xem'; }); - var xemExceptionDate = new Date(xemException[0].lastUpdate * 1000).toLocaleDateString(); + var xemExceptionDate = new Date(xemException[0].lastRefresh * 1000).toLocaleDateString(); var anidbException = exceptions.data.filter(function(obj) { return obj.id === 'anidb'; }); - var anidbExceptionDate = new Date(anidbException[0].lastUpdate * 1000).toLocaleDateString(); + var anidbExceptionDate = new Date(anidbException[0].lastRefresh * 1000).toLocaleDateString(); var table = $('
      ') .append( @@ -67,7 +67,7 @@ MEDUSA.manage.manageSearches = function() { // Start a spinner. updateSpinner(status, 'Retrieving scene exceptions...', true); - api.post('exceptiontype/operation', {type: 'REFRESH'}, { + api.post('alias-source/all/operation', {type: 'REFRESH'}, { timeout: 60000 }).then(function(response) { status[0].innerHTML = ''; @@ -75,7 +75,7 @@ MEDUSA.manage.manageSearches = function() { $('').text(response.data.result) ); - api.get('exceptiontype').then(function(response) { + api.get('alias-source').then(function(response) { updateExceptionTable(response); $('.forceSceneExceptionRefresh').addClass('disabled'); }).catch(function(err) { @@ -90,7 +90,7 @@ MEDUSA.manage.manageSearches = function() { }); // Initially load the exception types last updates on page load. - api.get('exceptiontype').then(function(response) { + api.get('alias-source').then(function(response) { updateExceptionTable(response); }).catch(function(err) { log.error('Trying to get scene exceptions failed with error: ' + err); diff --git a/static/js/plot-tooltip.js b/static/js/plot-tooltip.js index b707e1e397..6a0f85db37 100644 --- a/static/js/plot-tooltip.js +++ b/static/js/plot-tooltip.js @@ -1,11 +1,11 @@ $(function() { $('.plotInfo').each(function() { var match = $(this).attr('id').match(/^plot_info_([\da-z]+)_(\d+)_(\d+)$/); - // http://localhost:8081/api/v2/show/tvdb83462/s01e01/description?api_key=xxx + // http://localhost:8081/api/v2/series/tvdb83462/episode/s01e01/description?api_key=xxx $(this).qtip({ content: { text: function(event, qt) { - api.get('show/' + match[1] + '/s' + match[2] + 'e' + match[3] + '/description').then(function(response) { + api.get('series/' + match[1] + '/episode/s' + match[2] + 'e' + match[3] + '/description').then(function(response) { // Set the tooltip content upon successful retrieval qt.set('content.text', response.data); }, function(xhr) { diff --git a/static/js/quality-chooser.js b/static/js/quality-chooser.js index f219c9091e..0396961a7e 100644 --- a/static/js/quality-chooser.js +++ b/static/js/quality-chooser.js @@ -37,8 +37,8 @@ $(document).ready(function() { $('#allowed_qualities :selected').each(function(i, selected) { selectedAllowed[i] = $(selected).val(); }); - var url = 'show/' + $('#showIndexerSlug').attr('value') + - '/backlogged' + + var url = 'series/' + $('#series_slug').attr('value') + + '/legacy/backlogged' + '?allowed=' + selectedAllowed + '&preferred=' + selectedPreffered; api.get(url).then(function(response) { @@ -69,17 +69,15 @@ $(document).ready(function() { } function archiveEpisodes() { - var url = 'show/' + $('#showIndexerName').attr('value') + $('#showID').attr('value') + - '/archiveEpisodes'; - api.get(url).then(function(response) { - var archivedStatus = response.data.archived; + var url = 'series/' + $('#series_slug').attr('value') + '/operation'; + api.post(url, {type: 'ARCHIVE_EPISODES'}).then(function(response) { var html = ''; - if (archivedStatus) { - html = 'Successfuly archived episodes'; + if (response.status === 201) { + html = 'Successfully archived episodes'; // Recalculate backlogged episodes after we archive it backloggedEpisodes(); - } else { - html = 'Not episodes needed to be archived'; + } else if (response.status === 204) { + html = 'No episodes to be archived'; } $('#archivedStatus').html(html); // Restore button text diff --git a/static/js/schedule/index.js b/static/js/schedule/index.js index 04695a0008..6c682dfd12 100644 --- a/static/js/schedule/index.js +++ b/static/js/schedule/index.js @@ -62,7 +62,7 @@ 
diff --git a/static/js/schedule/index.js b/static/js/schedule/index.js
index 04695a0008..6c682dfd12 100644
--- a/static/js/schedule/index.js
+++ b/static/js/schedule/index.js
@@ -62,7 +62,7 @@ MEDUSA.schedule.index = function() {
     });

     $('.show-option select[name="layout"]').on('change', function() {
-        api.patch('config', {
+        api.patch('config/main', {
             layout: {
                 schedule: $(this).val()
             }
diff --git a/tests/apiv2/conftest.py b/tests/apiv2/conftest.py
index 4dc431f8ca..c2707ecea5 100644
--- a/tests/apiv2/conftest.py
+++ b/tests/apiv2/conftest.py
@@ -1,6 +1,5 @@
 # coding=utf-8
 """Configuration for pytest."""
-
 from medusa.server.core import get_apiv2_handlers
 import pytest
 import tornado.web
@@ -8,6 +7,8 @@
 @pytest.fixture(scope='session')
 def app():
+    from medusa import app as medusa_app
+    medusa_app.APP_VERSION = '0.0.0'
     return tornado.web.Application(get_apiv2_handlers(''))
diff --git a/tests/apiv2/test_config.py b/tests/apiv2/test_config.py
index 0895313e90..98fa235555 100644
--- a/tests/apiv2/test_config.py
+++ b/tests/apiv2/test_config.py
@@ -5,6 +5,8 @@
 import sys

 from medusa import app, db
+from medusa.helper.collections import NonEmptyDict
+
 import pytest
 from tornado.httpclient import HTTPError

@@ -17,118 +19,107 @@ def config(monkeypatch, app_config):
     app_config('LOCALE', (None, 'ABC'))
     app_locale = 'Unknown.ABC'

-    return {
-        'anonRedirect': app.ANON_REDIRECT,
-        'animeSplitHome': app.ANIME_SPLIT_HOME,
-        'comingEpsSort': app.COMING_EPS_SORT,
-        'datePreset': app.DATE_PRESET,
-        'fuzzyDating': app.FUZZY_DATING,
-        'themeName': app.THEME_NAME,
-        'posterSortby': app.POSTER_SORTBY,
-        'posterSortdir': app.POSTER_SORTDIR,
-        'rootDirs': app.ROOT_DIRS,
-        'sortArticle': app.SORT_ARTICLE,
-        'timePreset': app.TIME_PRESET,
-        'trimZero': app.TRIM_ZERO,
-        'fanartBackground': app.FANART_BACKGROUND,
-        'fanartBackgroundOpacity': 0 if app.FANART_BACKGROUND_OPACITY is None else float(app.FANART_BACKGROUND_OPACITY),
-        'branch': app.BRANCH,
-        'commitHash': app.CUR_COMMIT_HASH,
-        'release': app.APP_VERSION,
-        'sslVersion': app.OPENSSL_VERSION,
-        'pythonVersion': python_version,
-        'databaseVersion': {
-            'major': app.MAJOR_DB_VERSION,
-            'minor': app.MINOR_DB_VERSION
-        },
-        'os': platform.platform(),
-        'locale': app_locale,
-        'localUser': os_user,
-        'programDir': app.PROG_DIR,
-        'configFile': app.CONFIG_FILE,
-        'dbFilename': db.dbFilename(),
-        'cacheDir': app.CACHE_DIR,
-        'logDir': app.LOG_DIR,
-        'appArgs': app.MY_ARGS,
-        'webRoot': app.WEB_ROOT,
-        'githubUrl': app.GITHUB_IO_URL,
-        'wikiUrl': app.WIKI_URL,
-        'sourceUrl': app.APPLICATION_URL,
-        'downloadUrl': app.DOWNLOAD_URL,
-        'subtitlesMulti': app.SUBTITLES_MULTI,
-        'namingForceFolders': app.NAMING_FORCE_FOLDERS,
-        'subtitles': {
-            'enabled': bool(app.USE_SUBTITLES)
-        },
-        'kodi': {
-            'enabled': bool(app.USE_KODI and app.KODI_UPDATE_LIBRARY)
-        },
-        'plex': {
-            'server': {
-                'enabled': bool(app.USE_PLEX_SERVER),
-                'notify': {
-                    'snatch': bool(app.PLEX_NOTIFY_ONSNATCH),
-                    'download': bool(app.PLEX_NOTIFY_ONDOWNLOAD),
-                    'subtitleDownload': bool(app.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD)
-                },
-                'updateLibrary': bool(app.PLEX_UPDATE_LIBRARY),
-                'host': app.PLEX_SERVER_HOST,
-                'token': app.PLEX_SERVER_TOKEN,
-                'username': app.PLEX_SERVER_USERNAME,
-                'password': app.PLEX_SERVER_PASSWORD
-            },
-            'client': {
-                'enabled': bool(app.USE_PLEX_CLIENT),
-                'username': app.PLEX_CLIENT_USERNAME,
-                'password': app.PLEX_CLIENT_PASSWORD,
-                'host': app.PLEX_CLIENT_HOST
-            }
-        },
-        'emby': {
-            'enabled': bool(app.USE_EMBY)
-        },
-        'torrents': {
-            'enabled': bool(app.USE_TORRENTS),
-            'method': app.TORRENT_METHOD,
-            'username': app.TORRENT_USERNAME,
-            'password': app.TORRENT_PASSWORD,
-            'label': app.TORRENT_LABEL,
-            'labelAnime': app.TORRENT_LABEL_ANIME,
-            'verifySSL': app.TORRENT_VERIFY_CERT,
-            'path': app.TORRENT_PATH,
-            'seedTime': app.TORRENT_SEED_TIME,
-            'paused': app.TORRENT_PAUSED,
-            'highBandwidth': app.TORRENT_HIGH_BANDWIDTH,
-            'host': app.TORRENT_HOST,
-            'rpcurl': app.TORRENT_RPCURL,
-            'authType': app.TORRENT_AUTH_TYPE
-        },
-        'nzb': {
-            'enabled': bool(app.USE_NZBS),
-            'username': app.NZBGET_USERNAME,
-            'password': app.NZBGET_PASSWORD,
-            # app.NZBGET_CATEGORY
-            # app.NZBGET_CATEGORY_BACKLOG
-            # app.NZBGET_CATEGORY_ANIME
-            # app.NZBGET_CATEGORY_ANIME_BACKLOG
-            'host': app.NZBGET_HOST,
-            'priority': app.NZBGET_PRIORITY
-        },
-        'layout': {
-            'schedule': app.COMING_EPS_LAYOUT,
-            'history': app.HISTORY_LAYOUT,
-            'home': app.HOME_LAYOUT,
-            'show': {
-                'allSeasons': bool(app.DISPLAY_ALL_SEASONS),
-                'specials': bool(app.DISPLAY_SHOW_SPECIALS)
-            }
-        },
-        'selectedRootIndex': app.SELECTED_ROOT,
-        'backlogOverview': {
-            'period': app.BACKLOG_PERIOD,
-            'status': app.BACKLOG_STATUS
-        }
-    }
+    config_data = NonEmptyDict()
+    config_data['anonRedirect'] = app.ANON_REDIRECT
+    config_data['animeSplitHome'] = app.ANIME_SPLIT_HOME
+    config_data['comingEpsSort'] = app.COMING_EPS_SORT
+    config_data['datePreset'] = app.DATE_PRESET
+    config_data['fuzzyDating'] = app.FUZZY_DATING
+    config_data['themeName'] = app.THEME_NAME
+    config_data['posterSortby'] = app.POSTER_SORTBY
+    config_data['posterSortdir'] = app.POSTER_SORTDIR
+    config_data['rootDirs'] = app.ROOT_DIRS
+    config_data['sortArticle'] = app.SORT_ARTICLE
+    config_data['timePreset'] = app.TIME_PRESET
+    config_data['trimZero'] = app.TRIM_ZERO
+    config_data['fanartBackground'] = app.FANART_BACKGROUND
+    config_data['fanartBackgroundOpacity'] = float(app.FANART_BACKGROUND_OPACITY or 0)
+    config_data['branch'] = app.BRANCH
+    config_data['commitHash'] = app.CUR_COMMIT_HASH
+    config_data['release'] = app.APP_VERSION
+    config_data['sslVersion'] = app.OPENSSL_VERSION
+    config_data['pythonVersion'] = sys.version
+    config_data['databaseVersion'] = NonEmptyDict()
+    config_data['databaseVersion']['major'] = app.MAJOR_DB_VERSION
+    config_data['databaseVersion']['minor'] = app.MINOR_DB_VERSION
+    config_data['os'] = platform.platform()
+    config_data['locale'] = app_locale
+    config_data['localUser'] = os_user
+    config_data['programDir'] = app.PROG_DIR
+    config_data['configFile'] = app.CONFIG_FILE
+    config_data['dbFilename'] = db.dbFilename()
+    config_data['cacheDir'] = app.CACHE_DIR
+    config_data['logDir'] = app.LOG_DIR
+    config_data['appArgs'] = app.MY_ARGS
+    config_data['webRoot'] = app.WEB_ROOT
+    config_data['githubUrl'] = app.GITHUB_IO_URL
+    config_data['wikiUrl'] = app.WIKI_URL
+    config_data['sourceUrl'] = app.APPLICATION_URL
+    config_data['downloadUrl'] = app.DOWNLOAD_URL
+    config_data['subtitlesMulti'] = app.SUBTITLES_MULTI
+    config_data['namingForceFolders'] = app.NAMING_FORCE_FOLDERS
+    config_data['subtitles'] = NonEmptyDict()
+    config_data['subtitles']['enabled'] = bool(app.USE_SUBTITLES)
+    config_data['kodi'] = NonEmptyDict()
+    config_data['kodi']['enabled'] = bool(app.USE_KODI and app.KODI_UPDATE_LIBRARY)
+    config_data['plex'] = NonEmptyDict()
+    config_data['plex']['server'] = NonEmptyDict()
+    config_data['plex']['server']['enabled'] = bool(app.USE_PLEX_SERVER)
+    config_data['plex']['server']['notify'] = NonEmptyDict()
+    config_data['plex']['server']['notify']['snatch'] = bool(app.PLEX_NOTIFY_ONSNATCH)
+    config_data['plex']['server']['notify']['download'] = bool(app.PLEX_NOTIFY_ONDOWNLOAD)
+    config_data['plex']['server']['notify']['subtitleDownload'] = bool(app.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD)
+
+    config_data['plex']['server']['updateLibrary'] = bool(app.PLEX_UPDATE_LIBRARY)
+    config_data['plex']['server']['host'] = app.PLEX_SERVER_HOST
+    config_data['plex']['server']['token'] = app.PLEX_SERVER_TOKEN
+    config_data['plex']['server']['username'] = app.PLEX_SERVER_USERNAME
+    config_data['plex']['server']['password'] = app.PLEX_SERVER_PASSWORD
+    config_data['plex']['client'] = NonEmptyDict()
+    config_data['plex']['client']['enabled'] = bool(app.USE_PLEX_CLIENT)
+    config_data['plex']['client']['username'] = app.PLEX_CLIENT_USERNAME
+    config_data['plex']['client']['password'] = app.PLEX_CLIENT_PASSWORD
+    config_data['plex']['client']['host'] = app.PLEX_CLIENT_HOST
+    config_data['emby'] = NonEmptyDict()
+    config_data['emby']['enabled'] = bool(app.USE_EMBY)
+    config_data['torrents'] = NonEmptyDict()
+    config_data['torrents']['enabled'] = bool(app.USE_TORRENTS)
+    config_data['torrents']['method'] = app.TORRENT_METHOD
+    config_data['torrents']['username'] = app.TORRENT_USERNAME
+    config_data['torrents']['password'] = app.TORRENT_PASSWORD
+    config_data['torrents']['label'] = app.TORRENT_LABEL
+    config_data['torrents']['labelAnime'] = app.TORRENT_LABEL_ANIME
+    config_data['torrents']['verifySSL'] = app.TORRENT_VERIFY_CERT
+    config_data['torrents']['path'] = app.TORRENT_PATH
+    config_data['torrents']['seedTime'] = app.TORRENT_SEED_TIME
+    config_data['torrents']['paused'] = app.TORRENT_PAUSED
+    config_data['torrents']['highBandwidth'] = app.TORRENT_HIGH_BANDWIDTH
+    config_data['torrents']['host'] = app.TORRENT_HOST
+    config_data['torrents']['rpcurl'] = app.TORRENT_RPCURL
+    config_data['torrents']['authType'] = app.TORRENT_AUTH_TYPE
+    config_data['nzb'] = NonEmptyDict()
+    config_data['nzb']['enabled'] = bool(app.USE_NZBS)
+    config_data['nzb']['username'] = app.NZBGET_USERNAME
+    config_data['nzb']['password'] = app.NZBGET_PASSWORD
+    # app.NZBGET_CATEGORY
+    # app.NZBGET_CATEGORY_BACKLOG
+    # app.NZBGET_CATEGORY_ANIME
+    # app.NZBGET_CATEGORY_ANIME_BACKLOG
+    config_data['nzb']['host'] = app.NZBGET_HOST
+    config_data['nzb']['priority'] = app.NZBGET_PRIORITY
+    config_data['layout'] = NonEmptyDict()
+    config_data['layout']['schedule'] = app.COMING_EPS_LAYOUT
+    config_data['layout']['history'] = app.HISTORY_LAYOUT
+    config_data['layout']['home'] = app.HOME_LAYOUT
+    config_data['layout']['show'] = NonEmptyDict()
+    config_data['layout']['show']['allSeasons'] = bool(app.DISPLAY_ALL_SEASONS)
+    config_data['layout']['show']['specials'] = bool(app.DISPLAY_SHOW_SPECIALS)
+    config_data['selectedRootIndex'] = int(app.SELECTED_ROOT) if app.SELECTED_ROOT else None
+    config_data['backlogOverview'] = NonEmptyDict()
+    config_data['backlogOverview']['period'] = app.BACKLOG_PERIOD
+    config_data['backlogOverview']['status'] = app.BACKLOG_STATUS
+
+    return config_data

 @pytest.mark.gen_test
@@ -136,7 +127,7 @@ def test_config_get(http_client, create_url, auth_headers, config):
     # given
     expected = config
-    url = create_url('/config')
+    url = create_url('/config/main')

     # when
     response = yield http_client.fetch(url, **auth_headers)
@@ -157,7 +148,7 @@ def test_config_get(http_client, create_url, auth_headers, config):
 def test_config_get_detailed(http_client, create_url, auth_headers, config, query):
     # given
     expected = config[query]
-    url = create_url('/config/{0}/'.format(query))
+    url = create_url('/config/main/{0}/'.format(query))

     # when
     response = yield http_client.fetch(url, **auth_headers)
@@ -168,7 +159,20 @@ def test_config_get_detailed(http_client, create_url, auth_headers, config, query):

 @pytest.mark.gen_test
-def test_config_get_detailed_not_found(http_client, create_url, auth_headers):
+def test_config_get_detailed_bad_request(http_client, create_url, auth_headers):
+    # given
+    url = create_url('/config/main/abcdef/')
+
+    # when
+    with pytest.raises(HTTPError) as error:
+        yield http_client.fetch(url, **auth_headers)
+
+    # then
+    assert 400 == error.value.code
+
+
+@pytest.mark.gen_test
+def test_config_get_not_found(http_client, create_url, auth_headers):
     # given
     url = create_url('/config/abcdef/')
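As an illustration (not part of the patch): the config fixture above mirrors what a GET on the renamed /config/main resource is expected to return, and the detailed tests fetch a single sub-resource under it. A rough Python sketch of the equivalent calls, with the same assumed base URL and api_key placeholder as in the earlier example:

    import requests

    MEDUSA = 'http://localhost:8081/api/v2'   # assumed local base URL
    params = {'api_key': 'xxx'}               # placeholder key

    # Whole document, as covered by test_config_get.
    main = requests.get(MEDUSA + '/config/main', params=params).json()

    # One sub-tree, trailing slash as in test_config_get_detailed.
    layout = requests.get(MEDUSA + '/config/main/layout/', params=params).json()

    # Per the detailed test, this route should return just that sub-tree.
    assert main['layout'] == layout
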
diff --git a/tests/apiv2/test_log.py b/tests/apiv2/test_log.py
index 64456b2768..224ae1d34d 100644
--- a/tests/apiv2/test_log.py
+++ b/tests/apiv2/test_log.py
@@ -55,7 +55,7 @@ def test_log_get_pagination(http_client, create_url, auth_headers, logger, commi
 @pytest.mark.gen_test
 def test_log_post(monkeypatch, http_client, create_url, auth_headers, logger, read_loglines):
     # given
-    monkeypatch.setattr(log, 'logger', logger)
+    monkeypatch.setattr(log, 'log', logger)
     url = create_url('/log')
     body = {
         'message': 'Some %s {here}',
diff --git a/tests/apiv2/test_show.py b/tests/apiv2/test_series.py
similarity index 81%
rename from tests/apiv2/test_show.py
rename to tests/apiv2/test_series.py
index 79340f3eb3..089ab95356 100644
--- a/tests/apiv2/test_show.py
+++ b/tests/apiv2/test_series.py
@@ -6,9 +6,9 @@
 @pytest.mark.gen_test
-def test_show_get_no_shows(http_client, create_url, auth_headers):
+def test_show_get_no_series(http_client, create_url, auth_headers):
     # given
-    url = create_url('/show')
+    url = create_url('/series')

     # when
     response = yield http_client.fetch(url, **auth_headers)
diff --git a/tests/test_server_base.py b/tests/test_server_base.py
new file mode 100644
index 0000000000..005a3e4a5c
--- /dev/null
+++ b/tests/test_server_base.py
@@ -0,0 +1,133 @@
+# coding=utf-8
+"""Tests for the apiv2 base request handler."""
+
+import re
+from medusa.server.api.v2.base import BaseRequestHandler
+import pytest
+
+
+@pytest.mark.parametrize('p', [
+    { # p0
+        'url': '/foo/bar/123/a1b2c3/done',
+        'expected': {
+            'path1': 'bar',
+            'path2': '123',
+            'path3': 'a1b2c3',
+            'path4': 'done',
+        }
+    },
+    { # p1
+        'url': '/foo/bar/123/a1b2c3/done/',
+        'expected': {
+            'path1': 'bar',
+            'path2': '123',
+            'path3': 'a1b2c3',
+            'path4': 'done',
+        },
+    },
+    { # p2
+        'url': '/foo',
+        'expected': {
+            'path1': None,
+            'path2': None,
+            'path3': None,
+            'path4': None,
+        },
+    },
+    { # p3
+        'url': '/foo/',
+        'expected': {
+            'path1': None,
+            'path2': None,
+            'path3': None,
+            'path4': None,
+        },
+    },
+    { # p4
+        'url': '/foo/bar',
+        'expected': {
+            'path1': 'bar',
+            'path2': None,
+            'path3': None,
+            'path4': None,
+        },
+    },
+    { # p5
+        'url': '/foo/bar/',
+        'expected': {
+            'path1': 'bar',
+            'path2': None,
+            'path3': None,
+            'path4': None,
+        },
+    },
+    { # p6
+        'url': '/foo/bar/123',
+        'expected': {
+            'path1': 'bar',
+            'path2': '123',
+            'path3': None,
+            'path4': None,
+        },
+    },
+    { # p7
+        'url': '/foo/bar/123/',
+        'expected': {
+            'path1': 'bar',
+            'path2': '123',
+            'path3': None,
+            'path4': None,
+        },
+    },
+    { # p8
+        'url': '/foo/bar/123/a1b2c3',
+        'expected': {
+            'path1': 'bar',
+            'path2': '123',
+            'path3': 'a1b2c3',
+            'path4': None,
+        },
+    },
+    { # p9
+        'url': '/foo/bar/123/a1b2c3/',
+        'expected': {
+            'path1': 'bar',
+            'path2': '123',
+            'path3': 'a1b2c3',
+            'path4': None,
+        },
+    },
+    { # p10
+        'url': '/foo/bar/123/a1b2c3/done//',
+        'expected': None,
+    },
+    { # p11
+        'url': '/foo/bar/123/a1b2c3/done1',
+        'expected': None,
+    },
+    { # p12
+        'url': '/foo/bar/123/a1b2c3/done/more',
+        'expected': None,
+    },
+])
+def test_match_url(p):
+    # Given
+    sut = BaseRequestHandler
+    resource = 'foo'
+    paths = [
+        ('path1', r'[a-z]+'),
+        ('path2', r'\d+'),
+        ('path3', r'\w+'),
+        ('path4', r'[a-z]+'),
+    ]
+
+    regex = re.compile(sut.create_url('', resource, *paths))
+    url = p['url']
+    expected = p['expected']
+
+    # When
+    m = regex.match(url)
+    actual = m.groupdict() if m else None
+
+    # Then
+    assert expected == actual
diff --git a/tests/test_tv_identifiers.py b/tests/test_tv_identifiers.py
new file mode 100644
index 0000000000..a5a4fa7a98
--- /dev/null
+++ b/tests/test_tv_identifiers.py
@@ -0,0 +1,186 @@
+# coding=utf-8
+"""Tests for medusa.tv identifiers."""
+
+from datetime import datetime
+
+from medusa.tv.episode import AbsoluteNumber, AirByDateNumber, EpisodeNumber, RelativeNumber
+from medusa.tv.indexer import Indexer
+from medusa.tv.series import SeriesIdentifier
+import pytest
+
+
+@pytest.mark.parametrize('p', [
+    { # p0: tvdb
+        'slug': 'tvdb',
+        'expected': Indexer(1),
+    },
+    { # p1: tvmaze
+        'slug': 'tvmaze',
+        'expected': Indexer(3),
+    },
+    { # p2: tmdb
+        'slug': 'tmdb',
+        'expected': Indexer(4),
+    },
+    { # p3: invalid one
+        'slug': 'another',
+        'expected': None,
+    }
+])
+def test_indexer_identifier(p):
+    # Given
+    slug = p['slug']
+    expected = p['expected']
+
+    # When
+    actual = Indexer.from_slug(slug)
+
+    # Then
+    if expected is None:
+        assert actual is None
+    else:
+        assert actual
+        assert expected == actual
+        assert Indexer(expected.id + 1) != actual
+        assert expected.id != actual
+
+
+@pytest.mark.parametrize('p', [
+    { # p0: tvdb
+        'slug': 'tvdb1234',
+        'expected': SeriesIdentifier(Indexer(1), 1234),
+    },
+    { # p1: tvmaze
+        'slug': 'tvmaze567',
+        'expected': SeriesIdentifier(Indexer(3), 567),
+    },
+    { # p2: tmdb
+        'slug': 'tmdb89',
+        'expected': SeriesIdentifier(Indexer(4), 89),
+    },
+    { # p3: invalid one
+        'slug': 'another1122',
+        'expected': None,
+    }
+])
+def test_series_identifier(p):
+    # Given
+    slug = p['slug']
+    expected = p['expected']
+
+    # When
+    actual = SeriesIdentifier.from_slug(slug)
+
+    # Then
+    if expected is None:
+        assert actual is None
+    else:
+        assert actual
+        assert expected == actual
+        assert expected.id == actual.id
+        assert expected.indexer == actual.indexer
+        assert expected.id != actual
+        assert expected.indexer != actual
+
+
+@pytest.mark.parametrize('p', [
+    { # p0: s1
+        'slug': 's1',
+        'expected': None,
+    },
+    { # p1: s01
+        'slug': 's01',
+        'expected': None,
+    },
+    { # p2: S01
+        'slug': 'S01',
+        'expected': None,
+    },
+    { # p3: s12
+        'slug': 's12',
+        'expected': None,
+    },
+    { # p4: s123
+        'slug': 's123',
+        'expected': None,
+    },
+    { # p5: s1234
+        'slug': 's1234',
+        'expected': None,
+    },
+    { # p6: s12345
+        'slug': 's12345',
+        'expected': None,
+    },
+    { # p7: e2
+        'slug': 'e2',
+        'expected': AbsoluteNumber(2),
+    },
+    { # p8: e02
+        'slug': 'e02',
+        'expected': AbsoluteNumber(2),
+    },
+    { # p9: e12
+        'slug': 'e12',
+        'expected': AbsoluteNumber(12),
+    },
+    { # p10: e123
+        'slug': 'e123',
+        'expected': AbsoluteNumber(123),
+    },
+    { # p11: e1234
+        'slug': 'e1234',
+        'expected': None,
+    },
+    { # p12: E15
+        'slug': 'E15',
+        'expected': AbsoluteNumber(15),
+    },
+    { # p13: s01e02
+        'slug': 's01e02',
+        'expected': RelativeNumber(1, 2),
+    },
+    { # p14: s2017e02
+        'slug': 's2017e02',
+        'expected': RelativeNumber(2017, 2),
+    },
+    { # p15: 2017-07-16
+        'slug': '2017-07-16',
+        'expected': AirByDateNumber(datetime(year=2017, month=7, day=16)),
+    },
+    { # p16: 2017-17-16 (invalid date)
+        'slug': '2017-17-16',
+        'expected': None,
+    },
+    { # p17: Invalid
+        'slug': 's01e022017-07-16',
+        'expected': None,
+    },
+    { # p18: Invalid
+        'slug': '22017-07-16',
+        'expected': None,
+    },
+    { # p19: Invalid
+        'slug': 'ss01',
+        'expected': None,
+    },
+    { # p20: Invalid
+        'slug': 'ee01',
+        'expected': None,
+    },
+])
+def test_episode_identifier(p):
+    # Given
+    slug = p['slug']
+    expected = p['expected']
+
+    # When
+    actual = EpisodeNumber.from_slug(slug)
+
+    # Then
+    if expected is None:
+        assert not actual
+    else:
+        assert actual
+        assert expected == actual
+        assert slug != actual
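As an illustration (not part of the patch): the identifier tests above define how the slugs used by the new v2 routes are parsed, for example the series/tvdb83462/episode/s01e01/description URL from plot-tooltip.js. A minimal sketch, assuming only the classes, import paths, and attributes shown in the test files:

    from medusa.tv.episode import EpisodeNumber
    from medusa.tv.series import SeriesIdentifier

    series = SeriesIdentifier.from_slug('tvdb83462')   # indexer slug 'tvdb' plus series id 83462
    episode = EpisodeNumber.from_slug('s01e01')        # a RelativeNumber, per test_episode_identifier

    assert series is not None and episode is not None
    print(series.indexer, series.id)                   # attributes asserted in test_series_identifier
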
diff --git a/tox.ini b/tox.ini
index 04477a05d1..c83927eb3b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -6,8 +6,10 @@ envlist = py27
 passenv = TRAVIS TRAVIS_JOB_ID TRAVIS_BRANCH
 envdir = {toxworkdir}/tox
 deps =
+    dredd_hooks
     flake8_docstrings
     flake8-import-order
+    mock
     pycodestyle<2.1.0
     pydocstyle!=1.1.0
     pep8-naming
@@ -16,7 +18,6 @@ deps =
     pytest-flake8
     pytest-tornado
     PyYAML
-    mock
 commands =
     py.test -p no:cov -p no:unittest --flake8 {posargs} -k 'not dredd_hook.py' -k 'not node_modules'
diff --git a/views/displayShow.mako b/views/displayShow.mako
index ccf7786f77..a72c1838fd 100644
--- a/views/displayShow.mako
+++ b/views/displayShow.mako
@@ -20,6 +20,7 @@
 <%block name="content">
 <%namespace file="/inc_defs.mako" import="renderQualityPill"/>
+
 
diff --git a/views/editShow.mako b/views/editShow.mako
index dcf79058a5..bef59044c4 100644
--- a/views/editShow.mako
+++ b/views/editShow.mako
@@ -20,8 +20,8 @@
 % endif
 <%block name="content">
+
-
 % if not header is UNDEFINED:
 
     ${header}
 
 % else:
diff --git a/views/history.mako b/views/history.mako
index 5d416061fa..261311cc11 100644
--- a/views/history.mako
+++ b/views/history.mako
@@ -16,7 +16,8 @@
 <%block name="content">
 <%namespace file="/inc_defs.mako" import="renderQualityPill"/>
-
+
+
 % if not header is UNDEFINED:
diff --git a/views/home.mako b/views/home.mako
index fc5fbecbe9..dcd5608285 100644
--- a/views/home.mako
+++ b/views/home.mako
@@ -12,7 +12,11 @@
 <%block name="content">
-
+<%!
+    random_show = choice(app.showList) if app.showList else None
+%>
+
+
diff --git a/views/partials/home/banner.mako b/views/partials/home/banner.mako
index ad31cb1c2c..3227866c2d 100644
--- a/views/partials/home/banner.mako
+++ b/views/partials/home/banner.mako
@@ -139,13 +139,13 @@
                     ${cur_show.name}
                 % if cur_show.network:
-                    ${cur_show.network}
+                    ${cur_show.network} ${cur_show.network}
                 % else:
                     No Network
diff --git a/views/partials/home/poster.mako b/views/partials/home/poster.mako
index 5850111aea..098e3b6353 100644
--- a/views/partials/home/poster.mako
+++ b/views/partials/home/poster.mako
@@ -84,7 +84,7 @@
 
-
+